@artemiskit/adapter-openai 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +54 -0
- package/dist/client.d.ts +19 -0
- package/dist/client.d.ts.map +1 -0
- package/dist/index.d.ts +8 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +5648 -0
- package/dist/types.d.ts +24 -0
- package/dist/types.d.ts.map +1 -0
- package/package.json +47 -0
- package/src/client.ts +166 -0
- package/src/index.ts +8 -0
- package/src/types.ts +25 -0
- package/tsconfig.json +13 -0
package/dist/types.d.ts
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
/**
 * OpenAI adapter types
 */
// NOTE(review): generated declaration output (see types.d.ts.map) — it must
// mirror src/types.ts; regenerate via the build rather than editing by hand.

// Configuration for the public OpenAI API (provider tag 'openai').
export interface OpenAIAdapterConfig {
  provider: 'openai';
  // API key passed through to the OpenAI SDK client.
  apiKey?: string;
  // Endpoint override, e.g. for OpenAI-compatible gateways.
  baseUrl?: string;
  // Model used when a request does not name one.
  defaultModel?: string;
  // Request timeout in milliseconds.
  timeout?: number;
  // Automatic retry count for the SDK client.
  maxRetries?: number;
  // Optional OpenAI organization id.
  organization?: string;
}
// Configuration for an Azure OpenAI deployment (provider tag 'azure-openai').
export interface AzureOpenAIAdapterConfig {
  provider: 'azure-openai';
  apiKey?: string;
  baseUrl?: string;
  defaultModel?: string;
  timeout?: number;
  maxRetries?: number;
  // Azure resource name, i.e. https://<resourceName>.openai.azure.com
  resourceName: string;
  // Name of the model deployment within the resource.
  deploymentName: string;
  // Azure OpenAI REST API version query parameter.
  apiVersion: string;
}
//# sourceMappingURL=types.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,MAAM,WAAW,mBAAmB;IAClC,QAAQ,EAAE,QAAQ,CAAC;IACnB,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB;AAED,MAAM,WAAW,wBAAwB;IACvC,QAAQ,EAAE,cAAc,CAAC;IACzB,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,YAAY,EAAE,MAAM,CAAC;IACrB,cAAc,EAAE,MAAM,CAAC;IACvB,UAAU,EAAE,MAAM,CAAC;CACpB"}
|
package/package.json
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@artemiskit/adapter-openai",
|
|
3
|
+
"version": "0.1.2",
|
|
4
|
+
"description": "OpenAI and Azure OpenAI adapter for ArtemisKit LLM evaluation toolkit",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"license": "Apache-2.0",
|
|
7
|
+
"author": "code-sensei",
|
|
8
|
+
"repository": {
|
|
9
|
+
"type": "git",
|
|
10
|
+
"url": "https://github.com/code-sensei/artemiskit.git",
|
|
11
|
+
"directory": "packages/adapters/openai"
|
|
12
|
+
},
|
|
13
|
+
"bugs": {
|
|
14
|
+
"url": "https://github.com/code-sensei/artemiskit/issues"
|
|
15
|
+
},
|
|
16
|
+
"keywords": [
|
|
17
|
+
"llm",
|
|
18
|
+
"openai",
|
|
19
|
+
"azure",
|
|
20
|
+
"gpt-4",
|
|
21
|
+
"adapter",
|
|
22
|
+
"artemiskit"
|
|
23
|
+
],
|
|
24
|
+
"main": "./dist/index.js",
|
|
25
|
+
"types": "./dist/index.d.ts",
|
|
26
|
+
"exports": {
|
|
27
|
+
".": {
      "types": "./dist/index.d.ts",
      "import": "./dist/index.js"
    }
|
|
31
|
+
},
|
|
32
|
+
"scripts": {
|
|
33
|
+
"build": "tsc && bun build ./src/index.ts --outdir ./dist --target bun",
|
|
34
|
+
"typecheck": "tsc --noEmit",
|
|
35
|
+
"clean": "rm -rf dist",
|
|
36
|
+
"test": "bun test"
|
|
37
|
+
},
|
|
38
|
+
"dependencies": {
|
|
39
|
+
"@artemiskit/core": "workspace:*",
|
|
40
|
+
"openai": "^4.28.0",
|
|
41
|
+
"nanoid": "^5.0.0"
|
|
42
|
+
},
|
|
43
|
+
"devDependencies": {
|
|
44
|
+
"@types/bun": "^1.1.0",
|
|
45
|
+
"typescript": "^5.3.0"
|
|
46
|
+
}
|
|
47
|
+
}
|
package/src/client.ts
ADDED
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* OpenAI SDK Adapter
|
|
3
|
+
* Supports: OpenAI, Azure OpenAI, OpenAI-compatible APIs
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import type {
|
|
7
|
+
AdapterConfig,
|
|
8
|
+
GenerateOptions,
|
|
9
|
+
GenerateResult,
|
|
10
|
+
ModelCapabilities,
|
|
11
|
+
ModelClient,
|
|
12
|
+
} from '@artemiskit/core';
|
|
13
|
+
import { nanoid } from 'nanoid';
|
|
14
|
+
import OpenAI from 'openai';
|
|
15
|
+
import type { AzureOpenAIAdapterConfig, OpenAIAdapterConfig } from './types';
|
|
16
|
+
|
|
17
|
+
export class OpenAIAdapter implements ModelClient {
|
|
18
|
+
private client: OpenAI;
|
|
19
|
+
private config: OpenAIAdapterConfig | AzureOpenAIAdapterConfig;
|
|
20
|
+
readonly provider: string;
|
|
21
|
+
|
|
22
|
+
constructor(config: AdapterConfig) {
|
|
23
|
+
this.config = config as OpenAIAdapterConfig | AzureOpenAIAdapterConfig;
|
|
24
|
+
|
|
25
|
+
if (config.provider === 'azure-openai') {
|
|
26
|
+
const azureConfig = config as AzureOpenAIAdapterConfig;
|
|
27
|
+
this.provider = 'azure-openai';
|
|
28
|
+
|
|
29
|
+
this.client = new OpenAI({
|
|
30
|
+
apiKey: azureConfig.apiKey,
|
|
31
|
+
baseURL: `https://${azureConfig.resourceName}.openai.azure.com/openai/deployments/${azureConfig.deploymentName}`,
|
|
32
|
+
defaultQuery: { 'api-version': azureConfig.apiVersion },
|
|
33
|
+
defaultHeaders: { 'api-key': azureConfig.apiKey ?? '' },
|
|
34
|
+
timeout: azureConfig.timeout ?? 60000,
|
|
35
|
+
maxRetries: azureConfig.maxRetries ?? 2,
|
|
36
|
+
});
|
|
37
|
+
} else {
|
|
38
|
+
const openaiConfig = config as OpenAIAdapterConfig;
|
|
39
|
+
this.provider = 'openai';
|
|
40
|
+
|
|
41
|
+
this.client = new OpenAI({
|
|
42
|
+
apiKey: openaiConfig.apiKey,
|
|
43
|
+
baseURL: openaiConfig.baseUrl,
|
|
44
|
+
organization: openaiConfig.organization,
|
|
45
|
+
timeout: openaiConfig.timeout ?? 60000,
|
|
46
|
+
maxRetries: openaiConfig.maxRetries ?? 2,
|
|
47
|
+
});
|
|
48
|
+
}
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
async generate(options: GenerateOptions): Promise<GenerateResult> {
|
|
52
|
+
const startTime = Date.now();
|
|
53
|
+
const model = options.model || this.config.defaultModel || 'gpt-4';
|
|
54
|
+
|
|
55
|
+
const messages = this.normalizePrompt(options.prompt);
|
|
56
|
+
|
|
57
|
+
const response = await this.client.chat.completions.create({
|
|
58
|
+
model,
|
|
59
|
+
messages,
|
|
60
|
+
max_tokens: options.maxTokens,
|
|
61
|
+
temperature: options.temperature,
|
|
62
|
+
top_p: options.topP,
|
|
63
|
+
seed: options.seed,
|
|
64
|
+
stop: options.stop,
|
|
65
|
+
tools: options.tools,
|
|
66
|
+
response_format: options.responseFormat,
|
|
67
|
+
});
|
|
68
|
+
|
|
69
|
+
const latencyMs = Date.now() - startTime;
|
|
70
|
+
const choice = response.choices[0];
|
|
71
|
+
|
|
72
|
+
return {
|
|
73
|
+
id: response.id || nanoid(),
|
|
74
|
+
model: response.model,
|
|
75
|
+
text: choice.message.content || '',
|
|
76
|
+
tokens: {
|
|
77
|
+
prompt: response.usage?.prompt_tokens ?? 0,
|
|
78
|
+
completion: response.usage?.completion_tokens ?? 0,
|
|
79
|
+
total: response.usage?.total_tokens ?? 0,
|
|
80
|
+
},
|
|
81
|
+
latencyMs,
|
|
82
|
+
finishReason: this.mapFinishReason(choice.finish_reason),
|
|
83
|
+
toolCalls: choice.message.tool_calls?.map((tc) => ({
|
|
84
|
+
id: tc.id,
|
|
85
|
+
type: tc.type as 'function',
|
|
86
|
+
function: {
|
|
87
|
+
name: tc.function.name,
|
|
88
|
+
arguments: tc.function.arguments,
|
|
89
|
+
},
|
|
90
|
+
})),
|
|
91
|
+
raw: response,
|
|
92
|
+
};
|
|
93
|
+
}
|
|
94
|
+
|
|
95
|
+
async *stream(options: GenerateOptions, onChunk: (chunk: string) => void): AsyncIterable<string> {
|
|
96
|
+
const model = options.model || this.config.defaultModel || 'gpt-4';
|
|
97
|
+
const messages = this.normalizePrompt(options.prompt);
|
|
98
|
+
|
|
99
|
+
const stream = await this.client.chat.completions.create({
|
|
100
|
+
model,
|
|
101
|
+
messages,
|
|
102
|
+
max_tokens: options.maxTokens,
|
|
103
|
+
temperature: options.temperature,
|
|
104
|
+
stream: true,
|
|
105
|
+
});
|
|
106
|
+
|
|
107
|
+
for await (const chunk of stream) {
|
|
108
|
+
const content = chunk.choices[0]?.delta?.content || '';
|
|
109
|
+
if (content) {
|
|
110
|
+
onChunk(content);
|
|
111
|
+
yield content;
|
|
112
|
+
}
|
|
113
|
+
}
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
async embed(text: string): Promise<number[]> {
|
|
117
|
+
const model =
|
|
118
|
+
this.config.provider === 'azure-openai' ? 'text-embedding-ada-002' : 'text-embedding-3-small';
|
|
119
|
+
|
|
120
|
+
const response = await this.client.embeddings.create({
|
|
121
|
+
model,
|
|
122
|
+
input: text,
|
|
123
|
+
});
|
|
124
|
+
return response.data[0].embedding;
|
|
125
|
+
}
|
|
126
|
+
|
|
127
|
+
async capabilities(): Promise<ModelCapabilities> {
|
|
128
|
+
return {
|
|
129
|
+
streaming: true,
|
|
130
|
+
functionCalling: true,
|
|
131
|
+
toolUse: true,
|
|
132
|
+
maxContext: 128000,
|
|
133
|
+
vision: true,
|
|
134
|
+
jsonMode: true,
|
|
135
|
+
};
|
|
136
|
+
}
|
|
137
|
+
|
|
138
|
+
async close(): Promise<void> {
|
|
139
|
+
// No cleanup needed
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
private normalizePrompt(prompt: GenerateOptions['prompt']) {
|
|
143
|
+
if (typeof prompt === 'string') {
|
|
144
|
+
return [{ role: 'user' as const, content: prompt }];
|
|
145
|
+
}
|
|
146
|
+
return prompt.map((m) => ({
|
|
147
|
+
role: m.role as 'system' | 'user' | 'assistant',
|
|
148
|
+
content: m.content,
|
|
149
|
+
}));
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
private mapFinishReason(reason: string | null): GenerateResult['finishReason'] {
|
|
153
|
+
switch (reason) {
|
|
154
|
+
case 'stop':
|
|
155
|
+
return 'stop';
|
|
156
|
+
case 'length':
|
|
157
|
+
return 'length';
|
|
158
|
+
case 'tool_calls':
|
|
159
|
+
return 'tool_calls';
|
|
160
|
+
case 'content_filter':
|
|
161
|
+
return 'content_filter';
|
|
162
|
+
default:
|
|
163
|
+
return 'stop';
|
|
164
|
+
}
|
|
165
|
+
}
|
|
166
|
+
}
|
package/src/index.ts
ADDED
package/src/types.ts
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* OpenAI adapter types
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
export interface OpenAIAdapterConfig {
|
|
6
|
+
provider: 'openai';
|
|
7
|
+
apiKey?: string;
|
|
8
|
+
baseUrl?: string;
|
|
9
|
+
defaultModel?: string;
|
|
10
|
+
timeout?: number;
|
|
11
|
+
maxRetries?: number;
|
|
12
|
+
organization?: string;
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
export interface AzureOpenAIAdapterConfig {
|
|
16
|
+
provider: 'azure-openai';
|
|
17
|
+
apiKey?: string;
|
|
18
|
+
baseUrl?: string;
|
|
19
|
+
defaultModel?: string;
|
|
20
|
+
timeout?: number;
|
|
21
|
+
maxRetries?: number;
|
|
22
|
+
resourceName: string;
|
|
23
|
+
deploymentName: string;
|
|
24
|
+
apiVersion: string;
|
|
25
|
+
}
|
package/tsconfig.json
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
{
|
|
2
|
+
"extends": "../../../tsconfig.json",
|
|
3
|
+
"compilerOptions": {
|
|
4
|
+
"outDir": "./dist",
|
|
5
|
+
"rootDir": "./src",
|
|
6
|
+
"noEmit": false,
|
|
7
|
+
"declaration": true,
|
|
8
|
+
"declarationMap": true,
|
|
9
|
+
"emitDeclarationOnly": true
|
|
10
|
+
},
|
|
11
|
+
"include": ["src/**/*"],
|
|
12
|
+
"exclude": ["node_modules", "dist", "**/*.test.ts"]
|
|
13
|
+
}
|