@artemiskit/cli 0.1.2

Files changed (56)
  1. package/CHANGELOG.md +62 -0
  2. package/artemis-runs/my-project/-sEsU7KtJ7VE.json +188 -0
  3. package/bin/artemis.ts +13 -0
  4. package/dist/bin/artemis.d.ts +6 -0
  5. package/dist/bin/artemis.d.ts.map +1 -0
  6. package/dist/index.js +51297 -0
  7. package/dist/src/adapters.d.ts +6 -0
  8. package/dist/src/adapters.d.ts.map +1 -0
  9. package/dist/src/cli.d.ts +6 -0
  10. package/dist/src/cli.d.ts.map +1 -0
  11. package/dist/src/commands/compare.d.ts +6 -0
  12. package/dist/src/commands/compare.d.ts.map +1 -0
  13. package/dist/src/commands/history.d.ts +6 -0
  14. package/dist/src/commands/history.d.ts.map +1 -0
  15. package/dist/src/commands/index.d.ts +8 -0
  16. package/dist/src/commands/index.d.ts.map +1 -0
  17. package/dist/src/commands/init.d.ts +6 -0
  18. package/dist/src/commands/init.d.ts.map +1 -0
  19. package/dist/src/commands/redteam.d.ts +6 -0
  20. package/dist/src/commands/redteam.d.ts.map +1 -0
  21. package/dist/src/commands/report.d.ts +6 -0
  22. package/dist/src/commands/report.d.ts.map +1 -0
  23. package/dist/src/commands/run.d.ts +6 -0
  24. package/dist/src/commands/run.d.ts.map +1 -0
  25. package/dist/src/commands/stress.d.ts +6 -0
  26. package/dist/src/commands/stress.d.ts.map +1 -0
  27. package/dist/src/config/index.d.ts +6 -0
  28. package/dist/src/config/index.d.ts.map +1 -0
  29. package/dist/src/config/loader.d.ts +13 -0
  30. package/dist/src/config/loader.d.ts.map +1 -0
  31. package/dist/src/config/schema.d.ts +215 -0
  32. package/dist/src/config/schema.d.ts.map +1 -0
  33. package/dist/src/index.d.ts +6 -0
  34. package/dist/src/index.d.ts.map +1 -0
  35. package/dist/src/utils/adapter.d.ts +71 -0
  36. package/dist/src/utils/adapter.d.ts.map +1 -0
  37. package/dist/src/utils/storage.d.ts +22 -0
  38. package/dist/src/utils/storage.d.ts.map +1 -0
  39. package/package.json +65 -0
  40. package/src/adapters.ts +33 -0
  41. package/src/cli.ts +34 -0
  42. package/src/commands/compare.ts +104 -0
  43. package/src/commands/history.ts +80 -0
  44. package/src/commands/index.ts +8 -0
  45. package/src/commands/init.ts +111 -0
  46. package/src/commands/redteam.ts +511 -0
  47. package/src/commands/report.ts +126 -0
  48. package/src/commands/run.ts +233 -0
  49. package/src/commands/stress.ts +501 -0
  50. package/src/config/index.ts +6 -0
  51. package/src/config/loader.ts +112 -0
  52. package/src/config/schema.ts +56 -0
  53. package/src/index.ts +6 -0
  54. package/src/utils/adapter.ts +542 -0
  55. package/src/utils/storage.ts +67 -0
  56. package/tsconfig.json +13 -0
package/src/utils/adapter.ts ADDED
@@ -0,0 +1,542 @@
+ /**
+  * Shared adapter configuration builder
+  *
+  * Config precedence (top wins):
+  * 1. CLI options (--provider, --model, etc.)
+  * 2. Scenario providerConfig
+  * 3. artemis.config.yaml providers.<provider>
+  * 4. Environment variables
+  * 5. Defaults
+  */
+
+ import type { AdapterConfig } from '@artemiskit/core';
+ import type { ConfigSource, ResolvedConfig } from '@artemiskit/core';
+ import type { ProviderConfig } from '@artemiskit/core';
+ import type { ArtemisConfig } from '../config/schema';
+
+ export interface AdapterConfigOptions {
+   /** Provider name from CLI or scenario */
+   provider: string;
+   /** Model name from CLI or scenario (display/identifier) */
+   model?: string;
+   /** Temperature from CLI */
+   temperature?: number;
+   /** Max tokens from CLI */
+   maxTokens?: number;
+   /** Provider config from scenario file */
+   scenarioConfig?: ProviderConfig;
+   /** Config from artemis.config.yaml */
+   fileConfig?: ArtemisConfig | null;
+   /** Source of provider (for tracking) */
+   providerSource?: ConfigSource;
+   /** Source of model (for tracking) */
+   modelSource?: ConfigSource;
+ }
+
+ interface ResolvedValue<T> {
+   value: T | undefined;
+   source: ConfigSource | undefined;
+ }
+
+ /**
+  * Result of building adapter configuration
+  */
+ export interface AdapterConfigResult {
+   /** Adapter configuration for creating the client */
+   adapterConfig: AdapterConfig;
+   /** Resolved configuration with source tracking for manifest */
+   resolvedConfig: ResolvedConfig;
+ }
+
+ /**
+  * Build adapter configuration with proper precedence and source tracking
+  *
+  * Resolution order for each field:
+  * CLI > scenarioConfig > fileConfig.providers[provider] > environment variable > default
+  */
+ export function buildAdapterConfig(options: AdapterConfigOptions): AdapterConfigResult {
+   const {
+     provider,
+     model,
+     temperature,
+     maxTokens,
+     scenarioConfig,
+     fileConfig,
+     providerSource = 'cli',
+     modelSource,
+   } = options;
+
+   // Get provider-specific config from artemis.config.yaml
+   const fileProviderConfig = fileConfig?.providers?.[provider];
+
+   // Build resolved config based on provider
+   switch (provider) {
+     case 'openai':
+       return buildOpenAIConfig({
+         provider,
+         providerSource,
+         model,
+         modelSource,
+         temperature,
+         maxTokens,
+         scenarioConfig,
+         fileProviderConfig,
+       });
+
+     case 'azure-openai':
+       return buildAzureOpenAIConfig({
+         provider,
+         providerSource,
+         model,
+         modelSource,
+         temperature,
+         maxTokens,
+         scenarioConfig,
+         fileProviderConfig,
+       });
+
+     case 'vercel-ai':
+       return buildVercelAIConfig({
+         provider,
+         providerSource,
+         model,
+         modelSource,
+         temperature,
+         maxTokens,
+         scenarioConfig,
+         fileProviderConfig,
+       });
+
+     case 'anthropic':
+       return buildAnthropicConfig({
+         provider,
+         providerSource,
+         model,
+         modelSource,
+         temperature,
+         maxTokens,
+         scenarioConfig,
+         fileProviderConfig,
+       });
+
+     default:
+       // Fallback for unknown providers - treat as OpenAI-compatible
+       return buildOpenAIConfig({
+         provider,
+         providerSource,
+         model,
+         modelSource,
+         temperature,
+         maxTokens,
+         scenarioConfig,
+         fileProviderConfig,
+       });
+   }
+ }
+
+ interface ProviderBuildOptions {
+   provider: string;
+   providerSource: ConfigSource;
+   model?: string;
+   modelSource?: ConfigSource;
+   temperature?: number;
+   maxTokens?: number;
+   scenarioConfig?: ProviderConfig;
+   fileProviderConfig?: ProviderConfig;
+ }
+
+ function buildOpenAIConfig(options: ProviderBuildOptions): AdapterConfigResult {
+   const {
+     provider,
+     providerSource,
+     model,
+     modelSource,
+     temperature,
+     maxTokens,
+     scenarioConfig,
+     fileProviderConfig,
+   } = options;
+
+   const resolvedModel = resolveValueWithSource<string>(
+     { value: model, source: modelSource },
+     { value: scenarioConfig?.defaultModel, source: 'scenario' },
+     { value: fileProviderConfig?.defaultModel, source: 'config' }
+   );
+
+   const resolvedBaseUrl = resolveValueWithSource<string>(
+     { value: scenarioConfig?.baseUrl, source: 'scenario' },
+     { value: fileProviderConfig?.baseUrl, source: 'config' }
+   );
+
+   const resolvedOrganization = resolveValueWithSource<string>(
+     { value: scenarioConfig?.organization, source: 'scenario' },
+     { value: fileProviderConfig?.organization, source: 'config' },
+     { value: process.env.OPENAI_ORGANIZATION, source: 'env' }
+   );
+
+   const resolvedTimeout = resolveValueWithSource<number>(
+     { value: scenarioConfig?.timeout, source: 'scenario' },
+     { value: fileProviderConfig?.timeout, source: 'config' }
+   );
+
+   const resolvedMaxRetries = resolveValueWithSource<number>(
+     { value: scenarioConfig?.maxRetries, source: 'scenario' },
+     { value: fileProviderConfig?.maxRetries, source: 'config' }
+   );
+
+   // Temperature and maxTokens only come from CLI options
+   const resolvedTemperature = resolveValueWithSource<number>({ value: temperature, source: 'cli' });
+
+   const resolvedMaxTokens = resolveValueWithSource<number>({ value: maxTokens, source: 'cli' });
+
+   return {
+     adapterConfig: {
+       provider: 'openai',
+       apiKey: resolveValue(
+         scenarioConfig?.apiKey,
+         fileProviderConfig?.apiKey,
+         process.env.OPENAI_API_KEY
+       ),
+       baseUrl: resolvedBaseUrl.value,
+       organization: resolvedOrganization.value,
+       defaultModel: resolvedModel.value,
+       timeout: resolvedTimeout.value,
+       maxRetries: resolvedMaxRetries.value,
+     },
+     resolvedConfig: {
+       provider,
+       model: resolvedModel.value,
+       base_url: resolvedBaseUrl.value,
+       organization: resolvedOrganization.value,
+       timeout: resolvedTimeout.value,
+       max_retries: resolvedMaxRetries.value,
+       temperature: resolvedTemperature.value,
+       max_tokens: resolvedMaxTokens.value,
+       source: {
+         provider: providerSource,
+         model: resolvedModel.source,
+         base_url: resolvedBaseUrl.source,
+         organization: resolvedOrganization.source,
+         timeout: resolvedTimeout.source,
+         max_retries: resolvedMaxRetries.source,
+         temperature: resolvedTemperature.source,
+         max_tokens: resolvedMaxTokens.source,
+       },
+     },
+   };
+ }
+
+ function buildAzureOpenAIConfig(options: ProviderBuildOptions): AdapterConfigResult {
+   const {
+     provider,
+     providerSource,
+     model,
+     modelSource,
+     temperature,
+     maxTokens,
+     scenarioConfig,
+     fileProviderConfig,
+   } = options;
+
+   const resolvedModel = resolveValueWithSource<string>(
+     { value: model, source: modelSource },
+     { value: scenarioConfig?.defaultModel, source: 'scenario' },
+     { value: fileProviderConfig?.defaultModel, source: 'config' }
+   );
+
+   const resolvedResourceName = resolveValueWithSource<string>(
+     { value: scenarioConfig?.resourceName, source: 'scenario' },
+     { value: fileProviderConfig?.resourceName, source: 'config' },
+     { value: process.env.AZURE_OPENAI_RESOURCE, source: 'env' }
+   );
+
+   const resolvedDeploymentName = resolveValueWithSource<string>(
+     { value: scenarioConfig?.deploymentName, source: 'scenario' },
+     { value: fileProviderConfig?.deploymentName, source: 'config' },
+     { value: process.env.AZURE_OPENAI_DEPLOYMENT, source: 'env' }
+   );
+
+   const resolvedApiVersion = resolveValueWithSource<string>(
+     { value: scenarioConfig?.apiVersion, source: 'scenario' },
+     { value: fileProviderConfig?.apiVersion, source: 'config' },
+     { value: process.env.AZURE_OPENAI_API_VERSION, source: 'env' },
+     { value: '2024-02-15-preview', source: 'default' }
+   );
+
+   const resolvedTimeout = resolveValueWithSource<number>(
+     { value: scenarioConfig?.timeout, source: 'scenario' },
+     { value: fileProviderConfig?.timeout, source: 'config' }
+   );
+
+   const resolvedMaxRetries = resolveValueWithSource<number>(
+     { value: scenarioConfig?.maxRetries, source: 'scenario' },
+     { value: fileProviderConfig?.maxRetries, source: 'config' }
+   );
+
+   // Temperature and maxTokens only come from CLI options
+   const resolvedTemperature = resolveValueWithSource<number>({ value: temperature, source: 'cli' });
+
+   const resolvedMaxTokens = resolveValueWithSource<number>({ value: maxTokens, source: 'cli' });
+
+   return {
+     adapterConfig: {
+       provider: 'azure-openai',
+       apiKey: resolveValue(
+         scenarioConfig?.apiKey,
+         fileProviderConfig?.apiKey,
+         process.env.AZURE_OPENAI_API_KEY
+       ),
+       resourceName: resolvedResourceName.value || '',
+       deploymentName: resolvedDeploymentName.value || '',
+       apiVersion: resolvedApiVersion.value,
+       defaultModel: resolvedModel.value,
+       timeout: resolvedTimeout.value,
+       maxRetries: resolvedMaxRetries.value,
+     },
+     resolvedConfig: {
+       provider,
+       model: resolvedModel.value,
+       resource_name: resolvedResourceName.value,
+       deployment_name: resolvedDeploymentName.value,
+       api_version: resolvedApiVersion.value,
+       timeout: resolvedTimeout.value,
+       max_retries: resolvedMaxRetries.value,
+       temperature: resolvedTemperature.value,
+       max_tokens: resolvedMaxTokens.value,
+       source: {
+         provider: providerSource,
+         model: resolvedModel.source,
+         resource_name: resolvedResourceName.source,
+         deployment_name: resolvedDeploymentName.source,
+         api_version: resolvedApiVersion.source,
+         timeout: resolvedTimeout.source,
+         max_retries: resolvedMaxRetries.source,
+         temperature: resolvedTemperature.source,
+         max_tokens: resolvedMaxTokens.source,
+       },
+     },
+   };
+ }
+
+ function buildVercelAIConfig(options: ProviderBuildOptions): AdapterConfigResult {
+   const {
+     provider,
+     providerSource,
+     model,
+     modelSource,
+     temperature,
+     maxTokens,
+     scenarioConfig,
+     fileProviderConfig,
+   } = options;
+
+   const resolvedModel = resolveValueWithSource<string>(
+     { value: model, source: modelSource },
+     { value: scenarioConfig?.defaultModel, source: 'scenario' },
+     { value: fileProviderConfig?.defaultModel, source: 'config' }
+   );
+
+   const resolvedUnderlyingProvider = resolveValueWithSource<string>(
+     { value: scenarioConfig?.underlyingProvider, source: 'scenario' },
+     { value: fileProviderConfig?.underlyingProvider, source: 'config' },
+     { value: 'openai', source: 'default' }
+   );
+
+   const resolvedTimeout = resolveValueWithSource<number>(
+     { value: scenarioConfig?.timeout, source: 'scenario' },
+     { value: fileProviderConfig?.timeout, source: 'config' }
+   );
+
+   const resolvedMaxRetries = resolveValueWithSource<number>(
+     { value: scenarioConfig?.maxRetries, source: 'scenario' },
+     { value: fileProviderConfig?.maxRetries, source: 'config' }
+   );
+
+   // Temperature and maxTokens only come from CLI options
+   const resolvedTemperature = resolveValueWithSource<number>({ value: temperature, source: 'cli' });
+
+   const resolvedMaxTokens = resolveValueWithSource<number>({ value: maxTokens, source: 'cli' });
+
+   return {
+     adapterConfig: {
+       provider: 'vercel-ai',
+       underlyingProvider: resolvedUnderlyingProvider.value as 'openai' | 'anthropic',
+       apiKey: resolveValue(
+         scenarioConfig?.apiKey,
+         fileProviderConfig?.apiKey,
+         process.env.OPENAI_API_KEY
+       ),
+       defaultModel: resolvedModel.value,
+       timeout: resolvedTimeout.value,
+       maxRetries: resolvedMaxRetries.value,
+     },
+     resolvedConfig: {
+       provider,
+       model: resolvedModel.value,
+       underlying_provider: resolvedUnderlyingProvider.value,
+       timeout: resolvedTimeout.value,
+       max_retries: resolvedMaxRetries.value,
+       temperature: resolvedTemperature.value,
+       max_tokens: resolvedMaxTokens.value,
+       source: {
+         provider: providerSource,
+         model: resolvedModel.source,
+         underlying_provider: resolvedUnderlyingProvider.source,
+         timeout: resolvedTimeout.source,
+         max_retries: resolvedMaxRetries.source,
+         temperature: resolvedTemperature.source,
+         max_tokens: resolvedMaxTokens.source,
+       },
+     },
+   };
+ }
+
+ function buildAnthropicConfig(options: ProviderBuildOptions): AdapterConfigResult {
+   const {
+     provider,
+     providerSource,
+     model,
+     modelSource,
+     temperature,
+     maxTokens,
+     scenarioConfig,
+     fileProviderConfig,
+   } = options;
+
+   const resolvedModel = resolveValueWithSource<string>(
+     { value: model, source: modelSource },
+     { value: scenarioConfig?.defaultModel, source: 'scenario' },
+     { value: fileProviderConfig?.defaultModel, source: 'config' }
+   );
+
+   const resolvedBaseUrl = resolveValueWithSource<string>(
+     { value: scenarioConfig?.baseUrl, source: 'scenario' },
+     { value: fileProviderConfig?.baseUrl, source: 'config' }
+   );
+
+   const resolvedTimeout = resolveValueWithSource<number>(
+     { value: scenarioConfig?.timeout, source: 'scenario' },
+     { value: fileProviderConfig?.timeout, source: 'config' }
+   );
+
+   const resolvedMaxRetries = resolveValueWithSource<number>(
+     { value: scenarioConfig?.maxRetries, source: 'scenario' },
+     { value: fileProviderConfig?.maxRetries, source: 'config' }
+   );
+
+   // Temperature and maxTokens only come from CLI options
+   const resolvedTemperature = resolveValueWithSource<number>({ value: temperature, source: 'cli' });
+
+   const resolvedMaxTokens = resolveValueWithSource<number>({ value: maxTokens, source: 'cli' });
+
+   return {
+     adapterConfig: {
+       provider: 'anthropic',
+       apiKey: resolveValue(
+         scenarioConfig?.apiKey,
+         fileProviderConfig?.apiKey,
+         process.env.ANTHROPIC_API_KEY
+       ),
+       baseUrl: resolvedBaseUrl.value,
+       defaultModel: resolvedModel.value,
+       timeout: resolvedTimeout.value,
+       maxRetries: resolvedMaxRetries.value,
+     },
+     resolvedConfig: {
+       provider,
+       model: resolvedModel.value,
+       base_url: resolvedBaseUrl.value,
+       timeout: resolvedTimeout.value,
+       max_retries: resolvedMaxRetries.value,
+       temperature: resolvedTemperature.value,
+       max_tokens: resolvedMaxTokens.value,
+       source: {
+         provider: providerSource,
+         model: resolvedModel.source,
+         base_url: resolvedBaseUrl.source,
+         timeout: resolvedTimeout.source,
+         max_retries: resolvedMaxRetries.source,
+         temperature: resolvedTemperature.source,
+         max_tokens: resolvedMaxTokens.source,
+       },
+     },
+   };
+ }
+
+ /**
+  * Resolve a configuration value with source tracking
+  * Returns the first defined (non-undefined) value and its source
+  */
+ function resolveValueWithSource<T>(
+   ...options: { value: T | undefined | null; source: ConfigSource | undefined }[]
+ ): ResolvedValue<T> {
+   for (const option of options) {
+     if (option.value !== undefined && option.value !== null && option.value !== '') {
+       return { value: option.value as T, source: option.source };
+     }
+   }
+   return { value: undefined, source: undefined };
+ }
+
+ /**
+  * Resolve a configuration value with precedence (without source tracking)
+  * Returns the first defined (non-undefined) value
+  */
+ function resolveValue<T>(...values: (T | undefined | null)[]): T | undefined {
+   for (const value of values) {
+     if (value !== undefined && value !== null && value !== '') {
+       return value;
+     }
+   }
+   return undefined;
+ }
+
+ /**
+  * Get the effective provider from various sources with source tracking
+  */
+ export function resolveProviderWithSource(
+   cliProvider?: string,
+   scenarioProvider?: string,
+   configProvider?: string
+ ): { provider: string; source: ConfigSource } {
+   if (cliProvider) return { provider: cliProvider, source: 'cli' };
+   if (scenarioProvider) return { provider: scenarioProvider, source: 'scenario' };
+   if (configProvider) return { provider: configProvider, source: 'config' };
+   return { provider: 'openai', source: 'default' };
+ }
+
+ /**
+  * Get the effective model from various sources with source tracking
+  */
+ export function resolveModelWithSource(
+   cliModel?: string,
+   scenarioModel?: string,
+   configModel?: string
+ ): { model: string | undefined; source: ConfigSource | undefined } {
+   if (cliModel) return { model: cliModel, source: 'cli' };
+   if (scenarioModel) return { model: scenarioModel, source: 'scenario' };
+   if (configModel) return { model: configModel, source: 'config' };
+   return { model: undefined, source: undefined };
+ }
+
+ /**
+  * Get the effective provider from various sources (legacy - without source tracking)
+  */
+ export function resolveProvider(
+   cliProvider?: string,
+   scenarioProvider?: string,
+   configProvider?: string
+ ): string {
+   return cliProvider || scenarioProvider || configProvider || 'openai';
+ }
+
+ /**
+  * Get the effective model from various sources (legacy - without source tracking)
+  */
+ export function resolveModel(
+   cliModel?: string,
+   scenarioModel?: string,
+   configModel?: string
+ ): string | undefined {
+   return cliModel || scenarioModel || configModel;
+ }
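
To illustrate how the precedence rules documented in adapter.ts play out, here is a minimal usage sketch. It is not part of the published package: only the exports shown above are assumed, the import path assumes a caller inside package/src, and the CLI flag values, scenario model, and config-file provider are hypothetical.

import {
  buildAdapterConfig,
  resolveModelWithSource,
  resolveProviderWithSource,
} from './utils/adapter';

// Hypothetical inputs: --provider openai on the CLI, no --model, a scenario
// that names its own model, and a config file that names a different provider.
const { provider, source: providerSource } = resolveProviderWithSource('openai', undefined, 'anthropic');
const { model, source: modelSource } = resolveModelWithSource(undefined, 'gpt-4o-mini', undefined);

const { adapterConfig, resolvedConfig } = buildAdapterConfig({
  provider,         // 'openai' (CLI wins over the config-file provider)
  providerSource,   // 'cli'
  model,            // 'gpt-4o-mini' (scenario wins because no --model was given)
  modelSource,      // 'scenario'
  temperature: 0.2, // recorded with source 'cli'
  fileConfig: null, // no artemis.config.yaml loaded in this sketch
});
// adapterConfig is what gets handed to the client/adapter layer; resolvedConfig
// carries the per-field `source` map that the manifest records.
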
package/src/utils/storage.ts ADDED
@@ -0,0 +1,67 @@
+ /**
+  * Shared storage configuration builder
+  *
+  * Config precedence (top wins):
+  * 1. artemis.config.yaml storage section
+  * 2. Environment variables
+  * 3. Defaults
+  */
+
+ import { type StorageAdapter, createStorageAdapter } from '@artemiskit/core';
+ import type { ArtemisConfig } from '../config/schema';
+
+ export interface StorageOptions {
+   /** Config from artemis.config.yaml */
+   fileConfig?: ArtemisConfig | null;
+ }
+
+ /**
+  * Create storage adapter with config file support
+  *
+  * Resolution order:
+  * fileConfig.storage > environment variables > default (local)
+  */
+ export function createStorage(options: StorageOptions = {}): StorageAdapter {
+   const { fileConfig } = options;
+
+   // Check config file first
+   if (fileConfig?.storage) {
+     const storageConfig = fileConfig.storage;
+
+     if (storageConfig.type === 'supabase') {
+       // For Supabase, config values override env vars
+       return createStorageAdapter({
+         type: 'supabase',
+         url: storageConfig.url || process.env.SUPABASE_URL,
+         anonKey: storageConfig.anonKey || process.env.SUPABASE_ANON_KEY,
+         bucket: storageConfig.bucket || process.env.SUPABASE_BUCKET,
+       });
+     }
+
+     if (storageConfig.type === 'local') {
+       return createStorageAdapter({
+         type: 'local',
+         basePath: storageConfig.basePath || process.env.ARTEMIS_STORAGE_PATH || './artemis-runs',
+       });
+     }
+   }
+
+   // Fall back to environment variables
+   const supabaseUrl = process.env.SUPABASE_URL;
+   const supabaseKey = process.env.SUPABASE_ANON_KEY;
+
+   if (supabaseUrl && supabaseKey) {
+     return createStorageAdapter({
+       type: 'supabase',
+       url: supabaseUrl,
+       anonKey: supabaseKey,
+       bucket: process.env.SUPABASE_BUCKET,
+     });
+   }
+
+   // Default to local storage
+   return createStorageAdapter({
+     type: 'local',
+     basePath: process.env.ARTEMIS_STORAGE_PATH || './artemis-runs',
+   });
+ }
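
A brief usage sketch for the storage helper, again not part of the package: the config object below is hypothetical, and the cast to ArtemisConfig is only there to keep the sketch self-contained without knowing the full schema from config/schema.ts.

import type { ArtemisConfig } from './config/schema';
import { createStorage } from './utils/storage';

// No config file: falls through to SUPABASE_URL/SUPABASE_ANON_KEY if both are
// set, otherwise local storage under ARTEMIS_STORAGE_PATH or ./artemis-runs.
const defaultStorage = createStorage();

// Hypothetical artemis.config.yaml section pinning local storage to a custom
// path; the config-file value takes precedence over ARTEMIS_STORAGE_PATH.
const fileConfig = { storage: { type: 'local', basePath: './my-runs' } } as ArtemisConfig;
const configuredStorage = createStorage({ fileConfig });
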
package/tsconfig.json ADDED
@@ -0,0 +1,13 @@
+ {
+   "extends": "../../tsconfig.json",
+   "compilerOptions": {
+     "outDir": "./dist",
+     "rootDir": ".",
+     "noEmit": false,
+     "declaration": true,
+     "declarationMap": true,
+     "emitDeclarationOnly": true
+   },
+   "include": ["src/**/*", "bin/**/*"],
+   "exclude": ["node_modules", "dist", "**/*.test.ts"]
+ }