@cogitator-ai/openai-compat 1.1.0 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/README.md +692 -29
  2. package/package.json +5 -5
package/README.md CHANGED
@@ -1,6 +1,6 @@
  # @cogitator-ai/openai-compat
 
- OpenAI Assistants API compatibility layer for Cogitator. Use OpenAI SDK clients with Cogitator backend.
+ OpenAI Assistants API compatibility layer for Cogitator. Use OpenAI SDK clients with a Cogitator backend, or integrate Cogitator with existing OpenAI-based applications.
 
  ## Installation
 
@@ -8,70 +8,733 @@ OpenAI Assistants API compatibility layer for Cogitator. Use OpenAI SDK clients
  pnpm add @cogitator-ai/openai-compat
  ```
 
- ## Usage
+ ## Features
 
- ### Start the Server
+ - **OpenAI Server** - Expose Cogitator as an OpenAI-compatible REST API
+ - **OpenAI Adapter** - In-process adapter for programmatic access
+ - **Thread Manager** - Manage conversations, messages, and assistants
+ - **File Operations** - Upload and manage files for assistants
+ - **Full Assistants API** - Create, update, delete assistants
+ - **Run Management** - Execute runs with tool support
+ - **Authentication** - Optional API key authentication
+ - **CORS Support** - Configurable cross-origin requests
+
+ ---
+
+ ## Quick Start
+
+ ### Server Mode
 
  ```typescript
  import { createOpenAIServer } from '@cogitator-ai/openai-compat';
- import { Cogitator } from '@cogitator-ai/core';
+ import { Cogitator, tool } from '@cogitator-ai/core';
+ import { z } from 'zod';
+
+ const calculator = tool({
+   name: 'calculator',
+   description: 'Perform calculations',
+   parameters: z.object({
+     expression: z.string(),
+   }),
+   execute: async ({ expression }) => eval(expression).toString(),
+ });
+
+ const cogitator = new Cogitator({
+   defaultModel: 'openai/gpt-4o-mini',
+ });
 
- const cogitator = new Cogitator();
  const server = createOpenAIServer(cogitator, {
-   port: 3001,
-   apiKey: 'your-api-key', // Optional auth
+   port: 8080,
+   tools: [calculator],
+   apiKeys: ['sk-my-secret-key'],
  });
 
  await server.start();
+ // Server is now available at http://localhost:8080
  ```
 
- ### Use with OpenAI SDK
+ ### Client Mode
 
  ```typescript
  import OpenAI from 'openai';
 
  const openai = new OpenAI({
-   baseURL: 'http://localhost:3001/v1',
-   apiKey: 'your-api-key',
+   baseURL: 'http://localhost:8080/v1',
+   apiKey: 'sk-my-secret-key',
  });
 
- // Create an assistant
  const assistant = await openai.beta.assistants.create({
-   name: 'My Assistant',
-   instructions: 'You are a helpful assistant',
+   name: 'Math Tutor',
+   instructions: 'You help with math problems',
    model: 'ollama/llama3.2:3b',
  });
 
- // Create a thread
  const thread = await openai.beta.threads.create();
 
- // Add a message
  await openai.beta.threads.messages.create(thread.id, {
    role: 'user',
-   content: 'Hello!',
+   content: 'What is 2 + 2?',
  });
 
- // Run the assistant
  const run = await openai.beta.threads.runs.create(thread.id, {
    assistant_id: assistant.id,
  });
+
+ // Poll for completion
+ let status = run.status;
+ while (status === 'queued' || status === 'in_progress') {
+   await new Promise((r) => setTimeout(r, 1000));
+   const updated = await openai.beta.threads.runs.retrieve(thread.id, run.id);
+   status = updated.status;
+ }
+
+ const messages = await openai.beta.threads.messages.list(thread.id);
+ console.log(messages.data[0].content);
+ ```
+
+ ---
+
+ ## OpenAI Server
+
+ The `OpenAIServer` exposes Cogitator as an OpenAI-compatible REST API.
+
+ ### Configuration
+
+ ```typescript
+ import { OpenAIServer, createOpenAIServer } from '@cogitator-ai/openai-compat';
+
+ const server = new OpenAIServer(cogitator, {
+   port: 8080,
+   host: '0.0.0.0',
+
+   apiKeys: ['sk-key1', 'sk-key2'],
+
+   tools: [calculator, datetime, webSearch],
+
+   logging: true,
+
+   cors: {
+     origin: ['http://localhost:3000', 'https://myapp.com'],
+     methods: ['GET', 'POST', 'DELETE', 'OPTIONS'],
+   },
+ });
+ ```
+
+ ### Configuration Options
+
+ | Option | Type | Default | Description |
+ | -------------- | ------------------------------- | -------------------------------------- | ------------------------------------------------ |
+ | `port` | `number` | `8080` | Port to listen on |
+ | `host` | `string` | `'0.0.0.0'` | Host to bind to |
+ | `apiKeys` | `string[]` | `[]` | API keys for authentication. Empty disables auth |
+ | `tools` | `Tool[]` | `[]` | Tools available to assistants |
+ | `logging` | `boolean` | `false` | Enable request logging |
+ | `cors.origin` | `string \| string[] \| boolean` | `true` | CORS origin configuration |
+ | `cors.methods` | `string[]` | `['GET', 'POST', 'DELETE', 'OPTIONS']` | Allowed HTTP methods |
+
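+ All of these options are optional. As a minimal sketch that leans entirely on the defaults listed above (port `8080`, host `0.0.0.0`, authentication disabled because `apiKeys` is empty); the empty options object here is purely illustrative:
+
+ ```typescript
+ import { createOpenAIServer } from '@cogitator-ai/openai-compat';
+
+ // `cogitator` is assumed to be a configured Cogitator instance, as in Quick Start.
+ const server = createOpenAIServer(cogitator, {});
+
+ await server.start();
+ // With the defaults above this should resolve to something like http://localhost:8080/v1.
+ console.log(server.getBaseUrl());
+ ```
+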
+ ### Server Lifecycle
+
+ ```typescript
+ await server.start();
+
+ console.log(server.getUrl());
+ console.log(server.getBaseUrl());
+
+ console.log(server.isRunning());
+
+ const adapter = server.getAdapter();
+
+ await server.stop();
+ ```
+
+ ### Health Check
+
+ The server provides a health endpoint:
+
+ ```bash
+ curl http://localhost:8080/health
+ # {"status":"ok"}
+ ```
+
+ ---
+
+ ## OpenAI Adapter
+
+ The `OpenAIAdapter` provides in-process access without running a server.
+
+ ```typescript
+ import { OpenAIAdapter, createOpenAIAdapter } from '@cogitator-ai/openai-compat';
+
+ const adapter = createOpenAIAdapter(cogitator, {
+   tools: [calculator],
+ });
+ ```
+
+ ### Assistant Management
+
+ ```typescript
+ const assistant = adapter.createAssistant({
+   model: 'openai/gpt-4o',
+   name: 'Code Helper',
+   instructions: 'You help write code',
+   temperature: 0.7,
+   tools: [{ type: 'code_interpreter' }],
+   metadata: { category: 'development' },
+ });
+
+ const fetched = adapter.getAssistant(assistant.id);
+
+ const updated = adapter.updateAssistant(assistant.id, {
+   name: 'Code Expert',
+   temperature: 0.5,
+ });
+
+ const all = adapter.listAssistants();
+
+ const deleted = adapter.deleteAssistant(assistant.id);
+ ```
+
+ ### Thread Operations
+
+ ```typescript
+ const thread = adapter.createThread({ project: 'demo' });
+
+ const fetched = adapter.getThread(thread.id);
+
+ const message = adapter.addMessage(thread.id, {
+   role: 'user',
+   content: 'Hello, how are you?',
+   metadata: { source: 'web' },
+ });
+
+ const messages = adapter.listMessages(thread.id, {
+   limit: 20,
+   order: 'asc',
+   after: 'msg_abc123',
+   before: 'msg_xyz789',
+   run_id: 'run_123',
+ });
+
+ const msg = adapter.getMessage(thread.id, 'msg_abc123');
+
+ adapter.deleteThread(thread.id);
+ ```
+
+ ### Run Execution
+
+ ```typescript
+ const run = await adapter.createRun(thread.id, {
+   assistant_id: assistant.id,
+   model: 'openai/gpt-4o',
+   instructions: 'Be concise',
+   temperature: 0.5,
+   additional_messages: [{ role: 'user', content: 'Extra context' }],
+   metadata: { source: 'api' },
+ });
+
+ const status = adapter.getRun(thread.id, run.id);
+
+ const cancelled = adapter.cancelRun(thread.id, run.id);
+ ```
+
+ ### Tool Outputs
+
+ ```typescript
+ const run = adapter.getRun(thread.id, runId);
+
+ if (run?.status === 'requires_action') {
+   const toolCalls = run.required_action?.submit_tool_outputs.tool_calls;
+
+   const outputs = await Promise.all(
+     toolCalls!.map(async (call) => ({
+       tool_call_id: call.id,
+       output: await executeMyTool(call.function.name, call.function.arguments),
+     }))
+   );
+
+   await adapter.submitToolOutputs(thread.id, runId, {
+     tool_outputs: outputs,
+   });
+ }
+ ```
+
+ ---
+
+ ## Thread Manager
+
+ The `ThreadManager` handles storage for threads, messages, assistants, and files.
+
+ ```typescript
+ import { ThreadManager } from '@cogitator-ai/openai-compat';
+
+ const manager = new ThreadManager();
+ ```
+
+ ### Assistant Storage
+
+ ```typescript
+ interface StoredAssistant {
+   id: string;
+   name: string | null;
+   model: string;
+   instructions: string | null;
+   tools: AssistantTool[];
+   metadata: Record<string, string>;
+   temperature?: number;
+   created_at: number;
+ }
+
+ const assistant = manager.createAssistant({
+   model: 'gpt-4o',
+   name: 'Helper',
+   instructions: 'Be helpful',
+ });
+
+ const fetched = manager.getAssistant(assistant.id);
+ const updated = manager.updateAssistant(assistant.id, { name: 'Expert' });
+ const all = manager.listAssistants();
+ manager.deleteAssistant(assistant.id);
+ ```
+
+ ### Thread Storage
+
+ ```typescript
+ const thread = manager.createThread({ key: 'value' });
+ const fetched = manager.getThread(thread.id);
+ manager.deleteThread(thread.id);
+ ```
+
+ ### Message Operations
+
+ ```typescript
+ const message = manager.addMessage(thread.id, {
+   role: 'user',
+   content: 'Hello!',
+ });
+
+ const assistantMsg = manager.addAssistantMessage(thread.id, 'Hi there!', assistant.id, run.id);
+
+ const messages = manager.listMessages(thread.id, {
+   limit: 50,
+   order: 'desc',
+ });
+
+ const llmMessages = manager.getMessagesForLLM(thread.id);
+ ```
+
+ ### File Management
+
+ ```typescript
+ const file = manager.addFile(Buffer.from('file content'), 'document.txt');
+
+ const fetched = manager.getFile(file.id);
+
+ const all = manager.listFiles();
+
+ manager.deleteFile(file.id);
+ ```
+
+ ---
+
+ ## Supported Endpoints
+
+ ### Models
+
+ | Method | Endpoint | Description |
+ | ------ | ------------ | --------------------- |
+ | GET | `/v1/models` | List available models |
+
+ ### Assistants
+
+ | Method | Endpoint | Description |
+ | ------ | -------------------- | ---------------- |
+ | POST | `/v1/assistants` | Create assistant |
+ | GET | `/v1/assistants` | List assistants |
+ | GET | `/v1/assistants/:id` | Get assistant |
+ | POST | `/v1/assistants/:id` | Update assistant |
+ | DELETE | `/v1/assistants/:id` | Delete assistant |
+
+ ### Threads
+
+ | Method | Endpoint | Description |
+ | ------ | ----------------- | ------------- |
+ | POST | `/v1/threads` | Create thread |
+ | GET | `/v1/threads/:id` | Get thread |
+ | DELETE | `/v1/threads/:id` | Delete thread |
+
+ ### Messages
+
+ | Method | Endpoint | Description |
+ | ------ | ---------------------------------- | ------------- |
+ | POST | `/v1/threads/:id/messages` | Add message |
+ | GET | `/v1/threads/:id/messages` | List messages |
+ | GET | `/v1/threads/:id/messages/:msg_id` | Get message |
+
+ ### Runs
+
+ | Method | Endpoint | Description |
+ | ------ | -------------------------------------------------- | ------------------- |
+ | POST | `/v1/threads/:id/runs` | Create run |
+ | GET | `/v1/threads/:id/runs/:run_id` | Get run status |
+ | POST | `/v1/threads/:id/runs/:run_id/cancel` | Cancel run |
+ | POST | `/v1/threads/:id/runs/:run_id/submit_tool_outputs` | Submit tool outputs |
+
+ ### Files
+
+ | Method | Endpoint | Description |
+ | ------ | ----------------------- | --------------------- |
+ | POST | `/v1/files` | Upload file |
+ | GET | `/v1/files` | List files |
+ | GET | `/v1/files/:id` | Get file metadata |
+ | GET | `/v1/files/:id/content` | Download file content |
+ | DELETE | `/v1/files/:id` | Delete file |
+
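+ The endpoints above can also be called without the OpenAI SDK. A rough sketch using plain `fetch` (assuming the server was started with `apiKeys` configured and, like the OpenAI API itself, reads the key from a `Bearer` Authorization header):
+
+ ```typescript
+ const baseUrl = 'http://localhost:8080/v1';
+ const headers = {
+   'Content-Type': 'application/json',
+   // Assumption: the key is sent the same way the OpenAI SDK sends it.
+   Authorization: 'Bearer sk-my-secret-key',
+ };
+
+ // GET /v1/models - list the models Cogitator exposes.
+ const models = await fetch(`${baseUrl}/models`, { headers }).then((r) => r.json());
+
+ // POST /v1/assistants - create an assistant over plain HTTP.
+ const assistant = await fetch(`${baseUrl}/assistants`, {
+   method: 'POST',
+   headers,
+   body: JSON.stringify({ model: 'ollama/llama3.2:3b', name: 'REST Assistant' }),
+ }).then((r) => r.json());
+ ```
+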
+ ---
+
+ ## Error Handling
+
+ The server returns OpenAI-compatible error responses:
+
+ ```typescript
+ interface OpenAIError {
+   error: {
+     message: string;
+     type: string;
+     param?: string;
+     code?: string;
+   };
+ }
+ ```
+
+ ### Error Types
+
+ | HTTP Status | Type | Description |
+ | ----------- | ----------------------- | -------------------------- |
+ | 400 | `invalid_request_error` | Invalid request parameters |
+ | 401 | `authentication_error` | Invalid or missing API key |
+ | 404 | `invalid_request_error` | Resource not found |
+ | 500 | `server_error` | Internal server error |
+
+ ### Client-Side Error Handling
+
+ ```typescript
+ try {
+   const run = await openai.beta.threads.runs.create(threadId, {
+     assistant_id: 'invalid-id',
+   });
+ } catch (error) {
+   if (error instanceof OpenAI.APIError) {
+     console.log(error.status);
+     console.log(error.message);
+     console.log(error.code);
+   }
+ }
+ ```
+
+ ---
+
+ ## Run Status
+
+ Runs go through the following states:
+
+ ```typescript
+ type RunStatus =
+   | 'queued'
+   | 'in_progress'
+   | 'requires_action'
+   | 'cancelling'
+   | 'cancelled'
+   | 'failed'
+   | 'completed'
+   | 'incomplete'
+   | 'expired';
+ ```
+
+ ### Status Flow
+
+ ```
+ queued → in_progress → completed
+                      → failed
+                      → requires_action → in_progress → ...
+
+ in_progress → cancelling → cancelled
+ ```
+
+ ### Polling for Completion
+
+ ```typescript
+ async function waitForRun(openai: OpenAI, threadId: string, runId: string): Promise<Run> {
+   const terminalStates = ['completed', 'failed', 'cancelled', 'expired'];
+
+   while (true) {
+     const run = await openai.beta.threads.runs.retrieve(threadId, runId);
+
+     if (terminalStates.includes(run.status)) {
+       return run;
+     }
+
+     if (run.status === 'requires_action') {
+       return run;
+     }
+
+     await new Promise((r) => setTimeout(r, 1000));
+   }
+ }
+ ```
+
+ ---
+
+ ## Stream Events
+
+ The package defines stream event types for future streaming support:
+
+ ```typescript
+ type StreamEvent =
+   | { event: 'thread.created'; data: Thread }
+   | { event: 'thread.run.created'; data: Run }
+   | { event: 'thread.run.queued'; data: Run }
+   | { event: 'thread.run.in_progress'; data: Run }
+   | { event: 'thread.run.requires_action'; data: Run }
+   | { event: 'thread.run.completed'; data: Run }
+   | { event: 'thread.run.failed'; data: Run }
+   | { event: 'thread.run.cancelled'; data: Run }
+   | { event: 'thread.message.created'; data: Message }
+   | { event: 'thread.message.delta'; data: MessageDelta }
+   | { event: 'thread.message.completed'; data: Message }
+   | { event: 'done'; data: '[DONE]' };
+ ```
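+
+ Streaming is not wired up yet, but the union can already be narrowed on its `event` field. A small sketch of the kind of handler that could consume these events once streaming lands (`handleEvent` is illustrative, not part of the package):
+
+ ```typescript
+ import type { StreamEvent } from '@cogitator-ai/openai-compat';
+
+ function handleEvent(e: StreamEvent): void {
+   switch (e.event) {
+     case 'thread.message.delta':
+       // Incremental message content; e.data is a MessageDelta.
+       break;
+     case 'thread.run.requires_action':
+       // e.data is a Run waiting for tool outputs to be submitted.
+       break;
+     case 'done':
+       // Terminal marker; e.data is the literal string '[DONE]'.
+       break;
+     default:
+       // Remaining lifecycle events carry the full Run, Message, or Thread.
+       break;
+   }
+ }
+ ```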
+
+ ---
+
+ ## Type Reference
+
+ ### Core Types
+
+ ```typescript
+ import type {
+   OpenAIError,
+   ListResponse,
+   Assistant,
+   AssistantTool,
+   FunctionDefinition,
+   ResponseFormat,
+   CreateAssistantRequest,
+   UpdateAssistantRequest,
+ } from '@cogitator-ai/openai-compat';
+ ```
+
+ ### Thread Types
+
+ ```typescript
+ import type { Thread, ToolResources, CreateThreadRequest } from '@cogitator-ai/openai-compat';
+ ```
+
+ ### Message Types
+
+ ```typescript
+ import type {
+   Message,
+   MessageContent,
+   TextContent,
+   TextAnnotation,
+   Attachment,
+   CreateMessageRequest,
+   MessageContentPart,
+   MessageDelta,
+ } from '@cogitator-ai/openai-compat';
+ ```
+
+ ### Run Types
+
+ ```typescript
+ import type {
+   Run,
+   RunStatus,
+   RequiredAction,
+   ToolCall,
+   RunError,
+   Usage,
+   ToolChoice,
+   CreateRunRequest,
+   SubmitToolOutputsRequest,
+   ToolOutput,
+ } from '@cogitator-ai/openai-compat';
+ ```
+
+ ### Run Step Types
+
+ ```typescript
+ import type { RunStep, StepDetails, StepToolCall, RunStepDelta } from '@cogitator-ai/openai-compat';
+ ```
+
+ ### File Types
+
+ ```typescript
+ import type { FileObject, FilePurpose, UploadFileRequest } from '@cogitator-ai/openai-compat';
+ ```
+
+ ### Stream Types
+
+ ```typescript
+ import type { StreamEvent, MessageDelta, RunStepDelta } from '@cogitator-ai/openai-compat';
+ ```
+
+ ---
+
+ ## Examples
+
+ ### Chat Bot with Memory
+
+ ```typescript
+ import { createOpenAIServer } from '@cogitator-ai/openai-compat';
+ import { Cogitator } from '@cogitator-ai/core';
+ import OpenAI from 'openai';
+
+ const cogitator = new Cogitator({
+   defaultModel: 'ollama/llama3.2:3b',
+ });
+
+ const server = createOpenAIServer(cogitator, { port: 8080 });
+ await server.start();
+
+ const openai = new OpenAI({
+   baseURL: server.getBaseUrl(),
+   apiKey: 'not-needed',
+ });
+
+ const assistant = await openai.beta.assistants.create({
+   name: 'Chat Bot',
+   instructions: 'You are a friendly chat bot. Remember previous messages.',
+   model: 'ollama/llama3.2:3b',
+ });
+
+ const thread = await openai.beta.threads.create();
+
+ async function chat(message: string): Promise<string> {
+   await openai.beta.threads.messages.create(thread.id, {
+     role: 'user',
+     content: message,
+   });
+
+   const run = await openai.beta.threads.runs.create(thread.id, {
+     assistant_id: assistant.id,
+   });
+
+   while (true) {
+     const status = await openai.beta.threads.runs.retrieve(thread.id, run.id);
+     if (status.status === 'completed') break;
+     if (status.status === 'failed') throw new Error(status.last_error?.message);
+     await new Promise((r) => setTimeout(r, 500));
+   }
+
+   const messages = await openai.beta.threads.messages.list(thread.id, {
+     limit: 1,
+     order: 'desc',
+   });
+
+   const content = messages.data[0].content[0];
+   return content.type === 'text' ? content.text.value : '';
+ }
+
+ console.log(await chat('Hi, my name is Alex'));
+ console.log(await chat('What is my name?'));
  ```
 
- ### Supported Endpoints
+ ### Code Assistant with Tools
+
+ ```typescript
+ import { createOpenAIServer } from '@cogitator-ai/openai-compat';
+ import { Cogitator, tool } from '@cogitator-ai/core';
+ import { z } from 'zod';
+ import OpenAI from 'openai';
 
- - `POST /v1/assistants` - Create assistant
- - `GET /v1/assistants` - List assistants
- - `DELETE /v1/assistants/:id` - Delete assistant
- - `POST /v1/threads` - Create thread
- - `GET /v1/threads/:id` - Get thread
- - `POST /v1/threads/:id/messages` - Add message
- - `GET /v1/threads/:id/messages` - List messages
- - `POST /v1/threads/:id/runs` - Create run
- - `GET /v1/threads/:id/runs/:id` - Get run status
+ const runCode = tool({
+   name: 'run_code',
+   description: 'Execute Python code',
+   parameters: z.object({
+     code: z.string().describe('Python code to execute'),
+   }),
+   execute: async ({ code }) => {
+     return `Output: ${code.length} characters`;
+   },
+ });
+
+ const cogitator = new Cogitator({
+   defaultModel: 'openai/gpt-4o',
+ });
+
+ const server = createOpenAIServer(cogitator, {
+   port: 8080,
+   tools: [runCode],
+ });
+
+ await server.start();
 
- ## Documentation
+ const openai = new OpenAI({
+   baseURL: server.getBaseUrl(),
+   apiKey: process.env.OPENAI_API_KEY,
+ });
+
+ const assistant = await openai.beta.assistants.create({
+   name: 'Code Runner',
+   instructions: 'You can run Python code using the run_code tool.',
+   model: 'openai/gpt-4o',
+   tools: [{ type: 'function', function: { name: 'run_code' } }],
+ });
+ ```
+
+ ### File Upload
+
+ ```typescript
+ import fs from 'node:fs';
+ import OpenAI from 'openai';
+
+ const openai = new OpenAI({
+   baseURL: 'http://localhost:8080/v1',
+   apiKey: 'key',
+ });
+
+ const file = await openai.files.create({
+   file: fs.createReadStream('data.csv'),
+   purpose: 'assistants',
+ });
+
+ console.log('Uploaded:', file.id);
+
+ const content = await openai.files.content(file.id);
+ console.log('Content:', await content.text());
+
+ await openai.files.del(file.id);
+ ```
+
+ ### Multi-Model Setup
+
+ ```typescript
+ const cogitator = new Cogitator({
+   defaultModel: 'ollama/llama3.2:3b',
+ });
+
+ const server = createOpenAIServer(cogitator, { port: 8080 });
+ await server.start();
+
+ const openai = new OpenAI({
+   baseURL: server.getBaseUrl(),
+   apiKey: 'not-needed',
+ });
+
+ const localAssistant = await openai.beta.assistants.create({
+   name: 'Local Assistant',
+   model: 'ollama/llama3.2:3b',
+   instructions: 'Fast local responses',
+ });
+
+ const cloudAssistant = await openai.beta.assistants.create({
+   name: 'Cloud Assistant',
+   model: 'openai/gpt-4o',
+   instructions: 'Complex reasoning tasks',
+ });
+ ```
 
- See the [Cogitator documentation](https://github.com/eL1fe/cogitator) for full API reference.
+ ---
 
  ## License
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@cogitator-ai/openai-compat",
-   "version": "1.1.0",
+   "version": "2.0.0",
    "description": "OpenAI Assistants API compatibility layer for Cogitator",
    "type": "module",
    "main": "./dist/index.js",
@@ -21,8 +21,8 @@
      "fastify": "^5.2.3",
      "nanoid": "^5.1.5",
      "zod": "^3.22.4",
-     "@cogitator-ai/core": "0.2.0",
-     "@cogitator-ai/types": "0.3.1"
+     "@cogitator-ai/core": "0.3.0",
+     "@cogitator-ai/types": "0.4.0"
    },
    "devDependencies": {
      "@types/busboy": "^1.5.4",
@@ -30,8 +30,8 @@
      "vitest": "^1.6.0"
    },
    "peerDependencies": {
-     "@cogitator-ai/types": "0.3.1",
-     "@cogitator-ai/core": "0.2.0"
+     "@cogitator-ai/core": "0.3.0",
+     "@cogitator-ai/types": "0.4.0"
    },
    "engines": {
      "node": ">=20.0.0"