orchestrated 0.1.14 → 0.1.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4)
  1. package/index.d.ts +58 -5
  2. package/index.js +35136 -35019
  3. package/index.js.map +81 -81
  4. package/package.json +1 -1
package/index.d.ts CHANGED
@@ -55,11 +55,18 @@ export declare interface BaseScorerConfig {
55
55
  }
56
56
 
57
57
  /**
58
- * Client for submitting and managing batch requests to LiteLLM
59
- * Uses OpenAI-compatible Batch API for cost reduction (typically 50% discount)
58
+ * Client for submitting and managing batch requests
59
+ * Supports OpenAI Batch API and AWS Bedrock OpenAI-compatible Batch API
60
+ *
61
+ * Modes:
62
+ * - "OPENAI" (default): Uses OpenAI Files API for upload/download
63
+ * - "BEDROCK_OPENAI": Uses S3 for file I/O with Bedrock's OpenAI-compatible endpoint
60
64
  */
61
65
  declare class BatchClient {
62
66
  private client;
67
+ private mode;
68
+ private s3FileManager;
69
+ private bedrockConfig;
63
70
  private tempDir;
64
71
  requests: BatchRequest[];
65
72
  responses: BatchResult[];
@@ -69,7 +76,7 @@ declare class BatchClient {
69
76
  pendingBatches: Batch[];
70
77
  completedBatches: Batch[];
71
78
  duplicateCount: number;
72
- constructor(clientOptions?: ClientOptions);
79
+ constructor(options?: BatchClientOptions | ClientOptions);
73
80
  /**
74
81
  * Initialize the batch client by checking for existing batches
75
82
  * Must be called after constructor
@@ -102,11 +109,13 @@ declare class BatchClient {
102
109
  */
103
110
  private createJSONLFile;
104
111
  /**
105
- * Uploads a JSONL file to the LiteLLM/OpenAI API
112
+ * Uploads a JSONL file for batch processing
113
+ * - OpenAI mode: Uses OpenAI Files API
114
+ * - Bedrock mode: Uploads to S3, returns mock FileObject with S3 URI as id
106
115
  */
107
116
  private uploadFile;
108
117
  /**
109
- * Submits a batch of requests to LiteLLM
118
+ * Submits a batch of requests
110
119
  * Returns batch ID for tracking
111
120
  *
112
121
  * @param requests Array of batch requests to submit
@@ -127,6 +136,8 @@ declare class BatchClient {
127
136
  isBatchComplete(batch: Batch): boolean;
128
137
  /**
129
138
  * Downloads and parses results from a completed batch
139
+ * - OpenAI mode: Downloads via Files API
140
+ * - Bedrock mode: Reads from S3 output path
130
141
  *
131
142
  * @param batchId The batch ID to retrieve results for
132
143
  * @returns Array of batch results
@@ -151,6 +162,15 @@ declare class BatchClient {
151
162
  } | undefined;
152
163
  }
153
164
 
165
+ /**
166
+ * Options for constructing a BatchClient
167
+ */
168
+ declare interface BatchClientOptions {
169
+ mode?: "OPENAI" | "BEDROCK_OPENAI";
170
+ clientOptions?: ClientOptions;
171
+ bedrock?: BedrockBatchConfig;
172
+ }
173
+
154
174
  /**
155
175
  * Represents a single request in a batch
156
176
  */
@@ -176,6 +196,18 @@ declare interface BatchResult {
176
196
  };
177
197
  }
178
198
 
199
+ /**
200
+ * Bedrock-specific configuration for BEDROCK_OPENAI mode
201
+ */
202
+ declare interface BedrockBatchConfig {
203
+ region: string;
204
+ apiKey: string;
205
+ serviceRoleArn: string;
206
+ modelId: string;
207
+ s3Bucket: string;
208
+ s3Prefix?: string;
209
+ }
210
+
179
211
  export declare const Behavioral: ((args: unknown) => Promise<Score>) & {
180
212
  definition?: SerializableScorerDefinition;
181
213
  };
@@ -453,6 +485,12 @@ export declare interface EvalOptions<EvalReport = boolean> {
453
485
  * If not provided, a default OpenAI client will be created.
454
486
  */
455
487
  openaiClient?: default_2;
488
+ /**
489
+ * Batch processing backend:
490
+ * - "OPENAI": Use OpenAI Batch API (default)
491
+ * - "BEDROCK_OPENAI": Use AWS Bedrock OpenAI-compatible Batch API
492
+ */
493
+ batchProcessor?: "OPENAI" | "BEDROCK_OPENAI";
456
494
  __schedule?: string;
457
495
  }
458
496
 
@@ -522,6 +560,11 @@ declare interface EvalScorerContext {
522
560
  * - ORCHESTRATED_OTEL_ENDPOINT
523
561
  * - ORCHESTRATED_SEND_NO_LOGS
524
562
  * - ORCHESTRATED_DISABLE_BUNDLE_CACHE
563
+ * - ORCHESTRATED_BEDROCK_SERVICE_ROLE_ARN
564
+ * - ORCHESTRATED_BEDROCK_MODEL_ID
565
+ * - ORCHESTRATED_BEDROCK_REGION
566
+ * - ORCHESTRATED_BEDROCK_API_KEY
567
+ * - ORCHESTRATED_BEDROCK_S3_BUCKET
525
568
  *
526
569
  * Config File (Orchestrated.yaml):
527
570
  * - apiUrl
@@ -572,6 +615,16 @@ export declare interface EvalState {
572
615
  awsSessionToken: string | null;
573
616
  /** Disable bundle caching (useful for Lambda to always fetch fresh bundles) */
574
617
  disableBundleCache: boolean;
618
+ /** AWS Bedrock service role ARN (X-Amzn-Bedrock-RoleArn header) */
619
+ bedrockServiceRoleArn: string | null;
620
+ /** AWS Bedrock model ID (X-Amzn-Bedrock-ModelId header) */
621
+ bedrockModelId: string | null;
622
+ /** AWS region for Bedrock endpoint */
623
+ bedrockRegion: string | null;
624
+ /** Bedrock API key for bearer token auth */
625
+ bedrockApiKey: string | null;
626
+ /** S3 bucket for Bedrock batch I/O */
627
+ bedrockS3Bucket: string | null;
575
628
  }
576
629
 
577
630
  /**