@ainetwork/adk-provider-model-azure 0.2.0 → 0.2.2
This diff reflects the changes between publicly released package versions as they appear in their public registry and is provided for informational purposes only.
- package/dist/index.cjs +13 -30
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +2 -2
- package/dist/index.d.ts +2 -2
- package/dist/index.js +13 -30
- package/dist/index.js.map +1 -1
- package/index.ts +17 -37
- package/package.json +3 -3
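Across the files below the change set is consistent: the protocol-specific tool handling from 0.2.0 (the IMCPTool / IA2ATool branches) is replaced by the single ConnectorTool type from @ainetwork/adk/types/connector, thread history is rebuilt by mapping thread.messages as MessageObject values, and tool_choice is only set to "auto" when at least one tool is supplied (otherwise "none"). A minimal usage sketch under those assumptions — the deployment URL, API version, and the ConnectorTool literal are illustrative, not taken from the package:

import { AzureOpenAI } from "@ainetwork/adk-provider-model-azure";
import type { ConnectorTool } from "@ainetwork/adk/types/connector";

// Hypothetical Azure endpoint, API version, and deployment name — illustration only.
const model = new AzureOpenAI(
	"https://example.openai.azure.com/openai/deployments/gpt-4o",
	process.env.AZURE_OPENAI_API_KEY ?? "",
	"2024-06-01",
	"gpt-4o",
);

// A ConnectorTool as 0.2.2's convertToolsToFunctions reads it:
// toolName, description, and a JSON-schema inputSchema.
const weatherTool = {
	toolName: "get_weather",
	description: "Look up the current weather for a city",
	inputSchema: {
		type: "object",
		properties: { city: { type: "string" } },
		required: ["city"],
	},
} as unknown as ConnectorTool; // cast: the full ConnectorTool shape is defined by @ainetwork/adk

const messages = model.generateMessages({ query: "Weather in Seoul?" });
const functions = model.convertToolsToFunctions([weatherTool]);
// Non-empty functions => tool_choice: "auto"; empty => "none" (0.2.0 always sent "auto").
const response = await model.fetchWithContextMessage(messages, functions);
console.log(response.content, response.toolCalls);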
package/dist/index.cjs
CHANGED

@@ -25,7 +25,6 @@ __export(index_exports, {
 module.exports = __toCommonJS(index_exports);
 var import_modules = require("@ainetwork/adk/modules");
 var import_memory = require("@ainetwork/adk/types/memory");
-var import_tool = require("@ainetwork/adk/types/tool");
 var import_openai = require("openai");
 var AzureOpenAI = class extends import_modules.BaseModel {
   client;
@@ -53,11 +52,10 @@ var AzureOpenAI = class extends import_modules.BaseModel {
   generateMessages(params) {
     const { query, thread, systemPrompt } = params;
     const messages = !systemPrompt ? [] : [{ role: "system", content: systemPrompt.trim() }];
-    const sessionContent = !thread ? [] :
-      const chat = thread.messages[chatId];
+    const sessionContent = !thread ? [] : thread.messages.map((message) => {
       return {
-        role: this.getMessageRole(
-        content:
+        role: this.getMessageRole(message.role),
+        content: message.content.parts[0]
       };
     });
     const userContent = { role: "user", content: query };
@@ -84,7 +82,7 @@ var AzureOpenAI = class extends import_modules.BaseModel {
         model: this.modelName,
         messages,
         tools: functions,
-        tool_choice: "auto"
+        tool_choice: functions.length > 0 ? "auto" : "none"
       });
       const { content, tool_calls } = response.choices[0].message;
       const toolCalls = tool_calls?.map(
@@ -108,7 +106,7 @@ var AzureOpenAI = class extends import_modules.BaseModel {
       model: this.modelName,
       messages,
       tools: functions,
-      tool_choice: "auto",
+      tool_choice: functions.length > 0 ? "auto" : "none",
       stream: true
     });
     return this.createOpenAIStreamAdapter(stream);
@@ -150,29 +148,14 @@ var AzureOpenAI = class extends import_modules.BaseModel {
   convertToolsToFunctions(tools) {
     const functions = [];
     for (const tool of tools) {
-
-
-
-
-
-
-
-
-          name: id,
-          description: mcpTool.description,
-          parameters: mcpTool.inputSchema
-        }
-      });
-    } else {
-      const { id, card } = tool;
-      functions.push({
-        type: "function",
-        function: {
-          name: id,
-          description: card.description
-        }
-      });
-    }
+      functions.push({
+        type: "function",
+        function: {
+          name: tool.toolName,
+          description: tool.description,
+          parameters: tool.inputSchema
+        }
+      });
     }
     return functions;
   }
package/dist/index.cjs.map
CHANGED

@@ -1 +1 @@
[Regenerated single-line source map; the embedded sourcesContent now carries the updated index.ts (MessageObject/ConnectorTool imports, thread.messages mapping, guarded tool_choice), matching the package/index.ts diff below.]
package/dist/index.d.cts
CHANGED

@@ -1,7 +1,7 @@
 import { BaseModel } from '@ainetwork/adk/modules';
 import { ThreadObject } from '@ainetwork/adk/types/memory';
 import { LLMStream } from '@ainetwork/adk/types/stream';
-import { FetchResponse,
+import { FetchResponse, ConnectorTool } from '@ainetwork/adk/types/connector';
 import { ChatCompletionMessageParam, ChatCompletionTool } from 'openai/resources';
 
 declare class AzureOpenAI extends BaseModel<ChatCompletionMessageParam, ChatCompletionTool> {
@@ -19,7 +19,7 @@ declare class AzureOpenAI extends BaseModel<ChatCompletionMessageParam, ChatComp
     fetchWithContextMessage(messages: ChatCompletionMessageParam[], functions: ChatCompletionTool[]): Promise<FetchResponse>;
     fetchStreamWithContextMessage(messages: ChatCompletionMessageParam[], functions: ChatCompletionTool[]): Promise<LLMStream>;
     private createOpenAIStreamAdapter;
-    convertToolsToFunctions(tools:
+    convertToolsToFunctions(tools: ConnectorTool[]): ChatCompletionTool[];
 }
 
 export { AzureOpenAI };
package/dist/index.d.ts
CHANGED

@@ -1,7 +1,7 @@
 import { BaseModel } from '@ainetwork/adk/modules';
 import { ThreadObject } from '@ainetwork/adk/types/memory';
 import { LLMStream } from '@ainetwork/adk/types/stream';
-import { FetchResponse,
+import { FetchResponse, ConnectorTool } from '@ainetwork/adk/types/connector';
 import { ChatCompletionMessageParam, ChatCompletionTool } from 'openai/resources';
 
 declare class AzureOpenAI extends BaseModel<ChatCompletionMessageParam, ChatCompletionTool> {
@@ -19,7 +19,7 @@ declare class AzureOpenAI extends BaseModel<ChatCompletionMessageParam, ChatComp
     fetchWithContextMessage(messages: ChatCompletionMessageParam[], functions: ChatCompletionTool[]): Promise<FetchResponse>;
     fetchStreamWithContextMessage(messages: ChatCompletionMessageParam[], functions: ChatCompletionTool[]): Promise<LLMStream>;
     private createOpenAIStreamAdapter;
-    convertToolsToFunctions(tools:
+    convertToolsToFunctions(tools: ConnectorTool[]): ChatCompletionTool[];
 }
 
 export { AzureOpenAI };
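The regenerated declarations (.d.ts and .d.cts are identical) narrow the public surface in the same way. A hedged, type-only sketch of what callers now compile against — the adapter and tool variables are placeholders:

import type { ConnectorTool } from "@ainetwork/adk/types/connector";
import type { ChatCompletionTool } from "openai/resources";
import type { AzureOpenAI } from "@ainetwork/adk-provider-model-azure";

declare const adapter: AzureOpenAI;
declare const connectorTools: ConnectorTool[];

// 0.2.2: convertToolsToFunctions accepts only ConnectorTool[]; the 0.2.0
// signature built around the protocol-specific tool types no longer exists.
const functions: ChatCompletionTool[] = adapter.convertToolsToFunctions(connectorTools);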
package/dist/index.js
CHANGED

@@ -1,7 +1,6 @@
 // index.ts
 import { BaseModel } from "@ainetwork/adk/modules";
 import { MessageRole } from "@ainetwork/adk/types/memory";
-import { TOOL_PROTOCOL_TYPE } from "@ainetwork/adk/types/tool";
 import { AzureOpenAI as AzureOpenAIClient } from "openai";
 var AzureOpenAI = class extends BaseModel {
   client;
@@ -29,11 +28,10 @@ var AzureOpenAI = class extends BaseModel {
   generateMessages(params) {
     const { query, thread, systemPrompt } = params;
     const messages = !systemPrompt ? [] : [{ role: "system", content: systemPrompt.trim() }];
-    const sessionContent = !thread ? [] :
-      const chat = thread.messages[chatId];
+    const sessionContent = !thread ? [] : thread.messages.map((message) => {
       return {
-        role: this.getMessageRole(
-        content:
+        role: this.getMessageRole(message.role),
+        content: message.content.parts[0]
       };
     });
     const userContent = { role: "user", content: query };
@@ -60,7 +58,7 @@ var AzureOpenAI = class extends BaseModel {
         model: this.modelName,
         messages,
         tools: functions,
-        tool_choice: "auto"
+        tool_choice: functions.length > 0 ? "auto" : "none"
       });
       const { content, tool_calls } = response.choices[0].message;
       const toolCalls = tool_calls?.map(
@@ -84,7 +82,7 @@ var AzureOpenAI = class extends BaseModel {
       model: this.modelName,
       messages,
       tools: functions,
-      tool_choice: "auto",
+      tool_choice: functions.length > 0 ? "auto" : "none",
       stream: true
     });
     return this.createOpenAIStreamAdapter(stream);
@@ -126,29 +124,14 @@ var AzureOpenAI = class extends BaseModel {
   convertToolsToFunctions(tools) {
     const functions = [];
     for (const tool of tools) {
-
-
-
-
-
-
-
-
-          name: id,
-          description: mcpTool.description,
-          parameters: mcpTool.inputSchema
-        }
-      });
-    } else {
-      const { id, card } = tool;
-      functions.push({
-        type: "function",
-        function: {
-          name: id,
-          description: card.description
-        }
-      });
-    }
+      functions.push({
+        type: "function",
+        function: {
+          name: tool.toolName,
+          description: tool.description,
+          parameters: tool.inputSchema
+        }
+      });
     }
     return functions;
   }
package/dist/index.js.map
CHANGED

@@ -1 +1 @@
[Regenerated single-line source map; same underlying source changes as the package/index.ts diff below, with mappings rebuilt for the ESM build.]
package/index.ts
CHANGED

@@ -1,5 +1,5 @@
 import { BaseModel } from "@ainetwork/adk/modules";
-import { MessageRole, type ThreadObject } from "@ainetwork/adk/types/memory";
+import { MessageObject, MessageRole, type ThreadObject } from "@ainetwork/adk/types/memory";
 import type {
 	LLMStream,
 	StreamChunk,
@@ -7,12 +7,9 @@ import type {
 } from "@ainetwork/adk/types/stream";
 import type {
 	FetchResponse,
-	IA2ATool,
-	IAgentTool,
-	IMCPTool,
 	ToolCall,
-
-
+	ConnectorTool,
+} from "@ainetwork/adk/types/connector";
 import { AzureOpenAI as AzureOpenAIClient } from "openai";
 import type {
 	ChatCompletionMessageParam as CCMessageParam,
@@ -63,11 +60,10 @@ export class AzureOpenAI extends BaseModel<CCMessageParam, ChatCompletionTool> {
 			: [{ role: "system", content: systemPrompt.trim() }];
 		const sessionContent: CCMessageParam[] = !thread
 			? []
-			:
-			const chat = thread.messages[chatId];
+			: thread.messages.map((message: MessageObject) => {
 					return {
-						role: this.getMessageRole(
-						content:
+						role: this.getMessageRole(message.role),
+						content: message.content.parts[0],
 					};
 				});
 		const userContent: CCMessageParam = { role: "user", content: query };
@@ -101,7 +97,7 @@ export class AzureOpenAI extends BaseModel<CCMessageParam, ChatCompletionTool> {
 				model: this.modelName,
 				messages,
 				tools: functions,
-				tool_choice: "auto",
+				tool_choice: functions.length > 0 ? "auto" : "none",
 			});
 
 			const { content, tool_calls } = response.choices[0].message;
@@ -132,7 +128,7 @@ export class AzureOpenAI extends BaseModel<CCMessageParam, ChatCompletionTool> {
 			model: this.modelName,
 			messages,
 			tools: functions,
-			tool_choice: "auto",
+			tool_choice: functions.length > 0 ? "auto" : "none",
 			stream: true,
 		});
 		return this.createOpenAIStreamAdapter(stream);
@@ -176,33 +172,17 @@ export class AzureOpenAI extends BaseModel<CCMessageParam, ChatCompletionTool> {
 		};
 	}
 
-	convertToolsToFunctions(tools:
+	convertToolsToFunctions(tools: ConnectorTool[]): ChatCompletionTool[] {
 		const functions: ChatCompletionTool[] = [];
 		for (const tool of tools) {
-
-
-
-
-
-
-
-
-					name: id,
-					description: mcpTool.description,
-					parameters: mcpTool.inputSchema,
-				},
-			});
-		} else {
-			// PROTOCOL_TYPE.A2A
-			const { id, card } = tool as IA2ATool;
-			functions.push({
-				type: "function",
-				function: {
-					name: id,
-					description: card.description,
-				},
-			});
-		}
+			functions.push({
+				type: "function",
+				function: {
+					name: tool.toolName,
+					description: tool.description,
+					parameters: tool.inputSchema,
+				},
+			});
 		}
 		return functions;
 	}
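The rewritten generateMessages assumes thread.messages is an array of MessageObject values and takes each message's text from content.parts[0]. A small sketch of that mapping; the MessageObject literals are illustrative stand-ins, since the real MessageObject/ThreadObject definitions live in @ainetwork/adk:

import { MessageRole, type ThreadObject } from "@ainetwork/adk/types/memory";
import type { AzureOpenAI } from "@ainetwork/adk-provider-model-azure";

declare const model: AzureOpenAI;

// Illustrative thread: two earlier turns, each carrying its text in content.parts[0].
const thread = {
	messages: [
		{ role: MessageRole.USER, content: { parts: ["Hi"] } },
		{ role: MessageRole.MODEL, content: { parts: ["Hello! How can I help?"] } },
	],
} as unknown as ThreadObject;

// 0.2.2 maps each MessageObject to { role, content: message.content.parts[0] }
// and appends the new user query as the final message.
const messages = model.generateMessages({ query: "What changed in 0.2.2?", thread });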
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@ainetwork/adk-provider-model-azure",
-  "version": "0.2.0",
+  "version": "0.2.2",
   "author": "AI Network (https://ainetwork.ai)",
   "type": "module",
   "engines": {
@@ -21,7 +21,7 @@
     "clean": "rm -rf dist"
   },
   "dependencies": {
-    "@ainetwork/adk": "^0.2.
+    "@ainetwork/adk": "^0.2.7",
     "openai": "^5.10.2"
   },
   "devDependencies": {
@@ -31,5 +31,5 @@
   "publishConfig": {
     "access": "public"
   },
-  "gitHead": "
+  "gitHead": "e0021453227b0c9c0cd1d21efa0395e8e1f6740e"
 }