@ainetwork/adk-provider-model-azure 0.1.4 → 0.1.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +1 -1
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +1 -1
- package/dist/index.js.map +1 -1
- package/index.ts +2 -2
- package/package.json +3 -3
package/dist/index.cjs
CHANGED
@@ -111,7 +111,7 @@ var AzureOpenAI = class extends import_modules.BaseModel {
       tool_choice: "auto",
       stream: true
     });
-    return
+    return this.createOpenAIStreamAdapter(stream);
   }
   // NOTE(yoojin): Need to switch API Stream type to LLMStream.
   createOpenAIStreamAdapter(openaiStream) {

package/dist/index.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../index.ts"],"sourcesContent":["import { BaseModel } from \"@ainetwork/adk/modules\";\nimport { ChatRole, type SessionObject } from \"@ainetwork/adk/types/memory\";\nimport type {\n\tLLMStream,\n\tStreamChunk,\n\tToolCallDelta,\n} from \"@ainetwork/adk/types/stream\";\nimport type {\n\tFetchResponse,\n\tIA2ATool,\n\tIAgentTool,\n\tIMCPTool,\n\tToolCall,\n} from \"@ainetwork/adk/types/tool\";\nimport { TOOL_PROTOCOL_TYPE } from \"@ainetwork/adk/types/tool\";\nimport { AzureOpenAI as AzureOpenAIClient } from \"openai\";\nimport type {\n\tChatCompletionMessageParam as CCMessageParam,\n\tChatCompletionChunk,\n\tChatCompletionMessageToolCall,\n\tChatCompletionTool,\n} from \"openai/resources\";\n\nexport class AzureOpenAI extends BaseModel<CCMessageParam, ChatCompletionTool> {\n\tprivate client: AzureOpenAIClient;\n\tprivate modelName: string;\n\n\tconstructor(\n\t\tbaseUrl: string,\n\t\tapiKey: string,\n\t\tapiVersion: string,\n\t\tmodelName: string,\n\t) {\n\t\tsuper();\n\t\tthis.client = new AzureOpenAIClient({\n\t\t\tbaseURL: baseUrl,\n\t\t\tapiKey: apiKey,\n\t\t\tapiVersion: apiVersion,\n\t\t});\n\t\tthis.modelName = modelName;\n\t}\n\n\tprivate getMessageRole(role: ChatRole) {\n\t\tswitch (role) {\n\t\t\tcase ChatRole.USER:\n\t\t\t\treturn \"user\";\n\t\t\tcase ChatRole.MODEL:\n\t\t\tcase ChatRole.SYSTEM:\n\t\t\t\treturn \"system\";\n\t\t\tdefault:\n\t\t\t\treturn \"system\"; /*FIXME*/\n\t\t}\n\t}\n\n\tgenerateMessages(params: {\n\t\tquery: string;\n\t\tsessionHistory?: SessionObject;\n\t\tsystemPrompt?: string;\n\t}): CCMessageParam[] {\n\t\tconst { query, sessionHistory, systemPrompt } = params;\n\t\tconst messages: CCMessageParam[] = !systemPrompt\n\t\t\t? []\n\t\t\t: [{ role: \"system\", content: systemPrompt.trim() }];\n\t\tconst sessionContent: CCMessageParam[] = !sessionHistory\n\t\t\t? 
[]\n\t\t\t: Object.keys(sessionHistory.chats).map((chatId: string) => {\n\t\t\t\t\tconst chat = sessionHistory.chats[chatId];\n\t\t\t\t\treturn {\n\t\t\t\t\t\trole: this.getMessageRole(chat.role),\n\t\t\t\t\t\tcontent: chat.content.parts[0],\n\t\t\t\t\t};\n\t\t\t\t});\n\t\tconst userContent: CCMessageParam = { role: \"user\", content: query };\n\t\treturn messages.concat(sessionContent).concat(userContent);\n\t}\n\n\tappendMessages(messages: CCMessageParam[], message: string): void {\n\t\tmessages.push({\n\t\t\trole: \"user\",\n\t\t\tcontent: message,\n\t\t});\n\t}\n\n\tasync fetch(messages: CCMessageParam[]): Promise<FetchResponse> {\n\t\tconst response = await this.client.chat.completions.create({\n\t\t\tmodel: this.modelName,\n\t\t\tmessages,\n\t\t});\n\n\t\treturn {\n\t\t\tcontent: response.choices[0].message.content || undefined,\n\t\t};\n\t}\n\n\tasync fetchWithContextMessage(\n\t\tmessages: CCMessageParam[],\n\t\tfunctions: ChatCompletionTool[],\n\t): Promise<FetchResponse> {\n\t\tif (functions.length > 0) {\n\t\t\tconst response = await this.client.chat.completions.create({\n\t\t\t\tmodel: this.modelName,\n\t\t\t\tmessages,\n\t\t\t\ttools: functions,\n\t\t\t\ttool_choice: \"auto\",\n\t\t\t});\n\n\t\t\tconst { content, tool_calls } = response.choices[0].message;\n\n\t\t\tconst toolCalls: ToolCall[] | undefined = tool_calls?.map(\n\t\t\t\t(value: ChatCompletionMessageToolCall) => {\n\t\t\t\t\treturn {\n\t\t\t\t\t\tname: value.function.name,\n\t\t\t\t\t\t// FIXME: value.function.arguments could not be a valid JSON\n\t\t\t\t\t\targuments: JSON.parse(value.function.arguments),\n\t\t\t\t\t};\n\t\t\t\t},\n\t\t\t);\n\n\t\t\treturn {\n\t\t\t\tcontent: content || undefined,\n\t\t\t\ttoolCalls,\n\t\t\t};\n\t\t}\n\t\treturn await this.fetch(messages);\n\t}\n\n\tasync fetchStreamWithContextMessage(\n\t\tmessages: CCMessageParam[],\n\t\tfunctions: ChatCompletionTool[],\n\t) {\n\t\tconst stream = await this.client.chat.completions.create({\n\t\t\tmodel: this.modelName,\n\t\t\tmessages,\n\t\t\ttools: functions,\n\t\t\ttool_choice: \"auto\",\n\t\t\tstream: true,\n\t\t});\n\t\treturn
+
{"version":3,"sources":["../index.ts"],"sourcesContent":["import { BaseModel } from \"@ainetwork/adk/modules\";\nimport { ChatRole, type SessionObject } from \"@ainetwork/adk/types/memory\";\nimport type {\n\tLLMStream,\n\tStreamChunk,\n\tToolCallDelta,\n} from \"@ainetwork/adk/types/stream\";\nimport type {\n\tFetchResponse,\n\tIA2ATool,\n\tIAgentTool,\n\tIMCPTool,\n\tToolCall,\n} from \"@ainetwork/adk/types/tool\";\nimport { TOOL_PROTOCOL_TYPE } from \"@ainetwork/adk/types/tool\";\nimport { AzureOpenAI as AzureOpenAIClient } from \"openai\";\nimport type {\n\tChatCompletionMessageParam as CCMessageParam,\n\tChatCompletionChunk,\n\tChatCompletionMessageToolCall,\n\tChatCompletionTool,\n} from \"openai/resources\";\n\nexport class AzureOpenAI extends BaseModel<CCMessageParam, ChatCompletionTool> {\n\tprivate client: AzureOpenAIClient;\n\tprivate modelName: string;\n\n\tconstructor(\n\t\tbaseUrl: string,\n\t\tapiKey: string,\n\t\tapiVersion: string,\n\t\tmodelName: string,\n\t) {\n\t\tsuper();\n\t\tthis.client = new AzureOpenAIClient({\n\t\t\tbaseURL: baseUrl,\n\t\t\tapiKey: apiKey,\n\t\t\tapiVersion: apiVersion,\n\t\t});\n\t\tthis.modelName = modelName;\n\t}\n\n\tprivate getMessageRole(role: ChatRole) {\n\t\tswitch (role) {\n\t\t\tcase ChatRole.USER:\n\t\t\t\treturn \"user\";\n\t\t\tcase ChatRole.MODEL:\n\t\t\tcase ChatRole.SYSTEM:\n\t\t\t\treturn \"system\";\n\t\t\tdefault:\n\t\t\t\treturn \"system\"; /*FIXME*/\n\t\t}\n\t}\n\n\tgenerateMessages(params: {\n\t\tquery: string;\n\t\tsessionHistory?: SessionObject;\n\t\tsystemPrompt?: string;\n\t}): CCMessageParam[] {\n\t\tconst { query, sessionHistory, systemPrompt } = params;\n\t\tconst messages: CCMessageParam[] = !systemPrompt\n\t\t\t? []\n\t\t\t: [{ role: \"system\", content: systemPrompt.trim() }];\n\t\tconst sessionContent: CCMessageParam[] = !sessionHistory\n\t\t\t? 
[]\n\t\t\t: Object.keys(sessionHistory.chats).map((chatId: string) => {\n\t\t\t\t\tconst chat = sessionHistory.chats[chatId];\n\t\t\t\t\treturn {\n\t\t\t\t\t\trole: this.getMessageRole(chat.role),\n\t\t\t\t\t\tcontent: chat.content.parts[0],\n\t\t\t\t\t};\n\t\t\t\t});\n\t\tconst userContent: CCMessageParam = { role: \"user\", content: query };\n\t\treturn messages.concat(sessionContent).concat(userContent);\n\t}\n\n\tappendMessages(messages: CCMessageParam[], message: string): void {\n\t\tmessages.push({\n\t\t\trole: \"user\",\n\t\t\tcontent: message,\n\t\t});\n\t}\n\n\tasync fetch(messages: CCMessageParam[]): Promise<FetchResponse> {\n\t\tconst response = await this.client.chat.completions.create({\n\t\t\tmodel: this.modelName,\n\t\t\tmessages,\n\t\t});\n\n\t\treturn {\n\t\t\tcontent: response.choices[0].message.content || undefined,\n\t\t};\n\t}\n\n\tasync fetchWithContextMessage(\n\t\tmessages: CCMessageParam[],\n\t\tfunctions: ChatCompletionTool[],\n\t): Promise<FetchResponse> {\n\t\tif (functions.length > 0) {\n\t\t\tconst response = await this.client.chat.completions.create({\n\t\t\t\tmodel: this.modelName,\n\t\t\t\tmessages,\n\t\t\t\ttools: functions,\n\t\t\t\ttool_choice: \"auto\",\n\t\t\t});\n\n\t\t\tconst { content, tool_calls } = response.choices[0].message;\n\n\t\t\tconst toolCalls: ToolCall[] | undefined = tool_calls?.map(\n\t\t\t\t(value: ChatCompletionMessageToolCall) => {\n\t\t\t\t\treturn {\n\t\t\t\t\t\tname: value.function.name,\n\t\t\t\t\t\t// FIXME: value.function.arguments could not be a valid JSON\n\t\t\t\t\t\targuments: JSON.parse(value.function.arguments),\n\t\t\t\t\t};\n\t\t\t\t},\n\t\t\t);\n\n\t\t\treturn {\n\t\t\t\tcontent: content || undefined,\n\t\t\t\ttoolCalls,\n\t\t\t};\n\t\t}\n\t\treturn await this.fetch(messages);\n\t}\n\n\tasync fetchStreamWithContextMessage(\n\t\tmessages: CCMessageParam[],\n\t\tfunctions: ChatCompletionTool[],\n\t): Promise<LLMStream> {\n\t\tconst stream = await this.client.chat.completions.create({\n\t\t\tmodel: this.modelName,\n\t\t\tmessages,\n\t\t\ttools: functions,\n\t\t\ttool_choice: \"auto\",\n\t\t\tstream: true,\n\t\t});\n\t\treturn this.createOpenAIStreamAdapter(stream);\n\t}\n\n\t// NOTE(yoojin): Need to switch API Stream type to LLMStream.\n\tprivate createOpenAIStreamAdapter(\n\t\topenaiStream: AsyncIterable<ChatCompletionChunk>,\n\t): LLMStream {\n\t\treturn {\n\t\t\tasync *[Symbol.asyncIterator](): AsyncIterator<StreamChunk> {\n\t\t\t\tfor await (const openaiChunk of openaiStream) {\n\t\t\t\t\tconst choice = openaiChunk.choices[0];\n\t\t\t\t\tif (choice) {\n\t\t\t\t\t\tconst streamChunk: StreamChunk = {\n\t\t\t\t\t\t\tdelta: {\n\t\t\t\t\t\t\t\trole: choice.delta.role,\n\t\t\t\t\t\t\t\tcontent: choice.delta.content || undefined,\n\t\t\t\t\t\t\t\ttool_calls: choice.delta.tool_calls?.map(\n\t\t\t\t\t\t\t\t\t(tc) =>\n\t\t\t\t\t\t\t\t\t\t({\n\t\t\t\t\t\t\t\t\t\t\tindex: tc.index,\n\t\t\t\t\t\t\t\t\t\t\tid: tc.id,\n\t\t\t\t\t\t\t\t\t\t\ttype: tc.type,\n\t\t\t\t\t\t\t\t\t\t\tfunction: tc.function,\n\t\t\t\t\t\t\t\t\t\t}) as ToolCallDelta,\n\t\t\t\t\t\t\t\t),\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tfinish_reason: choice.finish_reason as any,\n\t\t\t\t\t\t\tmetadata: {\n\t\t\t\t\t\t\t\tprovider: \"openai\",\n\t\t\t\t\t\t\t\tmodel: openaiChunk.model,\n\t\t\t\t\t\t\t\tid: openaiChunk.id,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t};\n\t\t\t\t\t\tyield streamChunk;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t},\n\t\t\tmetadata: { provider: \"openai\" },\n\t\t};\n\t}\n\n\tconvertToolsToFunctions(tools: IAgentTool[]): ChatCompletionTool[] {\n\t\tconst functions: 
ChatCompletionTool[] = [];\n\t\tfor (const tool of tools) {\n\t\t\tif (!tool.enabled) {\n\t\t\t\tcontinue;\n\t\t\t}\n\t\t\tif (tool.protocol === TOOL_PROTOCOL_TYPE.MCP) {\n\t\t\t\tconst { mcpTool, id } = tool as IMCPTool;\n\t\t\t\tfunctions.push({\n\t\t\t\t\ttype: \"function\",\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: id,\n\t\t\t\t\t\tdescription: mcpTool.description,\n\t\t\t\t\t\tparameters: mcpTool.inputSchema,\n\t\t\t\t\t},\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\t// PROTOCOL_TYPE.A2A\n\t\t\t\tconst { id, card } = tool as IA2ATool;\n\t\t\t\tfunctions.push({\n\t\t\t\t\ttype: \"function\",\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: id,\n\t\t\t\t\t\tdescription: card.description,\n\t\t\t\t\t},\n\t\t\t\t});\n\t\t\t}\n\t\t}\n\t\treturn functions;\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAA0B;AAC1B,oBAA6C;AAa7C,kBAAmC;AACnC,oBAAiD;AAQ1C,IAAM,cAAN,cAA0B,yBAA8C;AAAA,EACtE;AAAA,EACA;AAAA,EAER,YACC,SACA,QACA,YACA,WACC;AACD,UAAM;AACN,SAAK,SAAS,IAAI,cAAAA,YAAkB;AAAA,MACnC,SAAS;AAAA,MACT;AAAA,MACA;AAAA,IACD,CAAC;AACD,SAAK,YAAY;AAAA,EAClB;AAAA,EAEQ,eAAe,MAAgB;AACtC,YAAQ,MAAM;AAAA,MACb,KAAK,uBAAS;AACb,eAAO;AAAA,MACR,KAAK,uBAAS;AAAA,MACd,KAAK,uBAAS;AACb,eAAO;AAAA,MACR;AACC,eAAO;AAAA,IACT;AAAA,EACD;AAAA,EAEA,iBAAiB,QAII;AACpB,UAAM,EAAE,OAAO,gBAAgB,aAAa,IAAI;AAChD,UAAM,WAA6B,CAAC,eACjC,CAAC,IACD,CAAC,EAAE,MAAM,UAAU,SAAS,aAAa,KAAK,EAAE,CAAC;AACpD,UAAM,iBAAmC,CAAC,iBACvC,CAAC,IACD,OAAO,KAAK,eAAe,KAAK,EAAE,IAAI,CAAC,WAAmB;AAC1D,YAAM,OAAO,eAAe,MAAM,MAAM;AACxC,aAAO;AAAA,QACN,MAAM,KAAK,eAAe,KAAK,IAAI;AAAA,QACnC,SAAS,KAAK,QAAQ,MAAM,CAAC;AAAA,MAC9B;AAAA,IACD,CAAC;AACH,UAAM,cAA8B,EAAE,MAAM,QAAQ,SAAS,MAAM;AACnE,WAAO,SAAS,OAAO,cAAc,EAAE,OAAO,WAAW;AAAA,EAC1D;AAAA,EAEA,eAAe,UAA4B,SAAuB;AACjE,aAAS,KAAK;AAAA,MACb,MAAM;AAAA,MACN,SAAS;AAAA,IACV,CAAC;AAAA,EACF;AAAA,EAEA,MAAM,MAAM,UAAoD;AAC/D,UAAM,WAAW,MAAM,KAAK,OAAO,KAAK,YAAY,OAAO;AAAA,MAC1D,OAAO,KAAK;AAAA,MACZ;AAAA,IACD,CAAC;AAED,WAAO;AAAA,MACN,SAAS,SAAS,QAAQ,CAAC,EAAE,QAAQ,WAAW;AAAA,IACjD;AAAA,EACD;AAAA,EAEA,MAAM,wBACL,UACA,WACyB;AACzB,QAAI,UAAU,SAAS,GAAG;AACzB,YAAM,WAAW,MAAM,KAAK,OAAO,KAAK,YAAY,OAAO;AAAA,QAC1D,OAAO,KAAK;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,aAAa;AAAA,MACd,CAAC;AAED,YAAM,EAAE,SAAS,WAAW,IAAI,SAAS,QAAQ,CAAC,EAAE;AAEpD,YAAM,YAAoC,YAAY;AAAA,QACrD,CAAC,UAAyC;AACzC,iBAAO;AAAA,YACN,MAAM,MAAM,SAAS;AAAA;AAAA,YAErB,WAAW,KAAK,MAAM,MAAM,SAAS,SAAS;AAAA,UAC/C;AAAA,QACD;AAAA,MACD;AAEA,aAAO;AAAA,QACN,SAAS,WAAW;AAAA,QACpB;AAAA,MACD;AAAA,IACD;AACA,WAAO,MAAM,KAAK,MAAM,QAAQ;AAAA,EACjC;AAAA,EAEA,MAAM,8BACL,UACA,WACqB;AACrB,UAAM,SAAS,MAAM,KAAK,OAAO,KAAK,YAAY,OAAO;AAAA,MACxD,OAAO,KAAK;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA,MACb,QAAQ;AAAA,IACT,CAAC;AACD,WAAO,KAAK,0BAA0B,MAAM;AAAA,EAC7C;AAAA;AAAA,EAGQ,0BACP,cACY;AACZ,WAAO;AAAA,MACN,QAAQ,OAAO,aAAa,IAAgC;AAC3D,yBAAiB,eAAe,cAAc;AAC7C,gBAAM,SAAS,YAAY,QAAQ,CAAC;AACpC,cAAI,QAAQ;AACX,kBAAM,cAA2B;AAAA,cAChC,OAAO;AAAA,gBACN,MAAM,OAAO,MAAM;AAAA,gBACnB,SAAS,OAAO,MAAM,WAAW;AAAA,gBACjC,YAAY,OAAO,MAAM,YAAY;AAAA,kBACpC,CAAC,QACC;AAAA,oBACA,OAAO,GAAG;AAAA,oBACV,IAAI,GAAG;AAAA,oBACP,MAAM,GAAG;AAAA,oBACT,UAAU,GAAG;AAAA,kBACd;AAAA,gBACF;AAAA,cACD;AAAA,cACA,eAAe,OAAO;AAAA,cACtB,UAAU;AAAA,gBACT,UAAU;AAAA,gBACV,OAAO,YAAY;AAAA,gBACnB,IAAI,YAAY;AAAA,cACjB;AAAA,YACD;AACA,kBAAM;AAAA,UACP;AAAA,QACD;AAAA,MACD;AAAA,MACA,UAAU,EAAE,UAAU,SAAS;AAAA,IAChC;AAAA,EACD;AAAA,EAEA,wBAAwB,OAA2C;AAClE,UAAM,YAAkC,CAAC;AACzC,eAAW,QAAQ,OAAO;AACzB,UAAI,CAAC,KAAK,SAAS;AAClB;AAAA,MACD;AACA,UAAI,KAAK,aAAa,+BAAmB,KAAK;AAC7C,cAAM,EAAE,SAAS,GAAG,IAAI;AACxB,kBAAU,KAAK;AAAA,UACd,MAAM;AAAA,UACN,UAAU;AAAA,YACT,MAAM;AAAA,YACN,aAAa,QAAQ;AAAA,YACrB,YAA
Y,QAAQ;AAAA,UACrB;AAAA,QACD,CAAC;AAAA,MACF,OAAO;AAEN,cAAM,EAAE,IAAI,KAAK,IAAI;AACrB,kBAAU,KAAK;AAAA,UACd,MAAM;AAAA,UACN,UAAU;AAAA,YACT,MAAM;AAAA,YACN,aAAa,KAAK;AAAA,UACnB;AAAA,QACD,CAAC;AAAA,MACF;AAAA,IACD;AACA,WAAO;AAAA,EACR;AACD;","names":["AzureOpenAIClient"]}
package/dist/index.js
CHANGED
@@ -87,7 +87,7 @@ var AzureOpenAI = class extends BaseModel {
       tool_choice: "auto",
       stream: true
     });
-    return
+    return this.createOpenAIStreamAdapter(stream);
   }
   // NOTE(yoojin): Need to switch API Stream type to LLMStream.
   createOpenAIStreamAdapter(openaiStream) {

package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../index.ts"],"sourcesContent":["import { BaseModel } from \"@ainetwork/adk/modules\";\nimport { ChatRole, type SessionObject } from \"@ainetwork/adk/types/memory\";\nimport type {\n\tLLMStream,\n\tStreamChunk,\n\tToolCallDelta,\n} from \"@ainetwork/adk/types/stream\";\nimport type {\n\tFetchResponse,\n\tIA2ATool,\n\tIAgentTool,\n\tIMCPTool,\n\tToolCall,\n} from \"@ainetwork/adk/types/tool\";\nimport { TOOL_PROTOCOL_TYPE } from \"@ainetwork/adk/types/tool\";\nimport { AzureOpenAI as AzureOpenAIClient } from \"openai\";\nimport type {\n\tChatCompletionMessageParam as CCMessageParam,\n\tChatCompletionChunk,\n\tChatCompletionMessageToolCall,\n\tChatCompletionTool,\n} from \"openai/resources\";\n\nexport class AzureOpenAI extends BaseModel<CCMessageParam, ChatCompletionTool> {\n\tprivate client: AzureOpenAIClient;\n\tprivate modelName: string;\n\n\tconstructor(\n\t\tbaseUrl: string,\n\t\tapiKey: string,\n\t\tapiVersion: string,\n\t\tmodelName: string,\n\t) {\n\t\tsuper();\n\t\tthis.client = new AzureOpenAIClient({\n\t\t\tbaseURL: baseUrl,\n\t\t\tapiKey: apiKey,\n\t\t\tapiVersion: apiVersion,\n\t\t});\n\t\tthis.modelName = modelName;\n\t}\n\n\tprivate getMessageRole(role: ChatRole) {\n\t\tswitch (role) {\n\t\t\tcase ChatRole.USER:\n\t\t\t\treturn \"user\";\n\t\t\tcase ChatRole.MODEL:\n\t\t\tcase ChatRole.SYSTEM:\n\t\t\t\treturn \"system\";\n\t\t\tdefault:\n\t\t\t\treturn \"system\"; /*FIXME*/\n\t\t}\n\t}\n\n\tgenerateMessages(params: {\n\t\tquery: string;\n\t\tsessionHistory?: SessionObject;\n\t\tsystemPrompt?: string;\n\t}): CCMessageParam[] {\n\t\tconst { query, sessionHistory, systemPrompt } = params;\n\t\tconst messages: CCMessageParam[] = !systemPrompt\n\t\t\t? []\n\t\t\t: [{ role: \"system\", content: systemPrompt.trim() }];\n\t\tconst sessionContent: CCMessageParam[] = !sessionHistory\n\t\t\t? 
[]\n\t\t\t: Object.keys(sessionHistory.chats).map((chatId: string) => {\n\t\t\t\t\tconst chat = sessionHistory.chats[chatId];\n\t\t\t\t\treturn {\n\t\t\t\t\t\trole: this.getMessageRole(chat.role),\n\t\t\t\t\t\tcontent: chat.content.parts[0],\n\t\t\t\t\t};\n\t\t\t\t});\n\t\tconst userContent: CCMessageParam = { role: \"user\", content: query };\n\t\treturn messages.concat(sessionContent).concat(userContent);\n\t}\n\n\tappendMessages(messages: CCMessageParam[], message: string): void {\n\t\tmessages.push({\n\t\t\trole: \"user\",\n\t\t\tcontent: message,\n\t\t});\n\t}\n\n\tasync fetch(messages: CCMessageParam[]): Promise<FetchResponse> {\n\t\tconst response = await this.client.chat.completions.create({\n\t\t\tmodel: this.modelName,\n\t\t\tmessages,\n\t\t});\n\n\t\treturn {\n\t\t\tcontent: response.choices[0].message.content || undefined,\n\t\t};\n\t}\n\n\tasync fetchWithContextMessage(\n\t\tmessages: CCMessageParam[],\n\t\tfunctions: ChatCompletionTool[],\n\t): Promise<FetchResponse> {\n\t\tif (functions.length > 0) {\n\t\t\tconst response = await this.client.chat.completions.create({\n\t\t\t\tmodel: this.modelName,\n\t\t\t\tmessages,\n\t\t\t\ttools: functions,\n\t\t\t\ttool_choice: \"auto\",\n\t\t\t});\n\n\t\t\tconst { content, tool_calls } = response.choices[0].message;\n\n\t\t\tconst toolCalls: ToolCall[] | undefined = tool_calls?.map(\n\t\t\t\t(value: ChatCompletionMessageToolCall) => {\n\t\t\t\t\treturn {\n\t\t\t\t\t\tname: value.function.name,\n\t\t\t\t\t\t// FIXME: value.function.arguments could not be a valid JSON\n\t\t\t\t\t\targuments: JSON.parse(value.function.arguments),\n\t\t\t\t\t};\n\t\t\t\t},\n\t\t\t);\n\n\t\t\treturn {\n\t\t\t\tcontent: content || undefined,\n\t\t\t\ttoolCalls,\n\t\t\t};\n\t\t}\n\t\treturn await this.fetch(messages);\n\t}\n\n\tasync fetchStreamWithContextMessage(\n\t\tmessages: CCMessageParam[],\n\t\tfunctions: ChatCompletionTool[],\n\t) {\n\t\tconst stream = await this.client.chat.completions.create({\n\t\t\tmodel: this.modelName,\n\t\t\tmessages,\n\t\t\ttools: functions,\n\t\t\ttool_choice: \"auto\",\n\t\t\tstream: true,\n\t\t});\n\t\treturn
+
{"version":3,"sources":["../index.ts"],"sourcesContent":["import { BaseModel } from \"@ainetwork/adk/modules\";\nimport { ChatRole, type SessionObject } from \"@ainetwork/adk/types/memory\";\nimport type {\n\tLLMStream,\n\tStreamChunk,\n\tToolCallDelta,\n} from \"@ainetwork/adk/types/stream\";\nimport type {\n\tFetchResponse,\n\tIA2ATool,\n\tIAgentTool,\n\tIMCPTool,\n\tToolCall,\n} from \"@ainetwork/adk/types/tool\";\nimport { TOOL_PROTOCOL_TYPE } from \"@ainetwork/adk/types/tool\";\nimport { AzureOpenAI as AzureOpenAIClient } from \"openai\";\nimport type {\n\tChatCompletionMessageParam as CCMessageParam,\n\tChatCompletionChunk,\n\tChatCompletionMessageToolCall,\n\tChatCompletionTool,\n} from \"openai/resources\";\n\nexport class AzureOpenAI extends BaseModel<CCMessageParam, ChatCompletionTool> {\n\tprivate client: AzureOpenAIClient;\n\tprivate modelName: string;\n\n\tconstructor(\n\t\tbaseUrl: string,\n\t\tapiKey: string,\n\t\tapiVersion: string,\n\t\tmodelName: string,\n\t) {\n\t\tsuper();\n\t\tthis.client = new AzureOpenAIClient({\n\t\t\tbaseURL: baseUrl,\n\t\t\tapiKey: apiKey,\n\t\t\tapiVersion: apiVersion,\n\t\t});\n\t\tthis.modelName = modelName;\n\t}\n\n\tprivate getMessageRole(role: ChatRole) {\n\t\tswitch (role) {\n\t\t\tcase ChatRole.USER:\n\t\t\t\treturn \"user\";\n\t\t\tcase ChatRole.MODEL:\n\t\t\tcase ChatRole.SYSTEM:\n\t\t\t\treturn \"system\";\n\t\t\tdefault:\n\t\t\t\treturn \"system\"; /*FIXME*/\n\t\t}\n\t}\n\n\tgenerateMessages(params: {\n\t\tquery: string;\n\t\tsessionHistory?: SessionObject;\n\t\tsystemPrompt?: string;\n\t}): CCMessageParam[] {\n\t\tconst { query, sessionHistory, systemPrompt } = params;\n\t\tconst messages: CCMessageParam[] = !systemPrompt\n\t\t\t? []\n\t\t\t: [{ role: \"system\", content: systemPrompt.trim() }];\n\t\tconst sessionContent: CCMessageParam[] = !sessionHistory\n\t\t\t? 
[]\n\t\t\t: Object.keys(sessionHistory.chats).map((chatId: string) => {\n\t\t\t\t\tconst chat = sessionHistory.chats[chatId];\n\t\t\t\t\treturn {\n\t\t\t\t\t\trole: this.getMessageRole(chat.role),\n\t\t\t\t\t\tcontent: chat.content.parts[0],\n\t\t\t\t\t};\n\t\t\t\t});\n\t\tconst userContent: CCMessageParam = { role: \"user\", content: query };\n\t\treturn messages.concat(sessionContent).concat(userContent);\n\t}\n\n\tappendMessages(messages: CCMessageParam[], message: string): void {\n\t\tmessages.push({\n\t\t\trole: \"user\",\n\t\t\tcontent: message,\n\t\t});\n\t}\n\n\tasync fetch(messages: CCMessageParam[]): Promise<FetchResponse> {\n\t\tconst response = await this.client.chat.completions.create({\n\t\t\tmodel: this.modelName,\n\t\t\tmessages,\n\t\t});\n\n\t\treturn {\n\t\t\tcontent: response.choices[0].message.content || undefined,\n\t\t};\n\t}\n\n\tasync fetchWithContextMessage(\n\t\tmessages: CCMessageParam[],\n\t\tfunctions: ChatCompletionTool[],\n\t): Promise<FetchResponse> {\n\t\tif (functions.length > 0) {\n\t\t\tconst response = await this.client.chat.completions.create({\n\t\t\t\tmodel: this.modelName,\n\t\t\t\tmessages,\n\t\t\t\ttools: functions,\n\t\t\t\ttool_choice: \"auto\",\n\t\t\t});\n\n\t\t\tconst { content, tool_calls } = response.choices[0].message;\n\n\t\t\tconst toolCalls: ToolCall[] | undefined = tool_calls?.map(\n\t\t\t\t(value: ChatCompletionMessageToolCall) => {\n\t\t\t\t\treturn {\n\t\t\t\t\t\tname: value.function.name,\n\t\t\t\t\t\t// FIXME: value.function.arguments could not be a valid JSON\n\t\t\t\t\t\targuments: JSON.parse(value.function.arguments),\n\t\t\t\t\t};\n\t\t\t\t},\n\t\t\t);\n\n\t\t\treturn {\n\t\t\t\tcontent: content || undefined,\n\t\t\t\ttoolCalls,\n\t\t\t};\n\t\t}\n\t\treturn await this.fetch(messages);\n\t}\n\n\tasync fetchStreamWithContextMessage(\n\t\tmessages: CCMessageParam[],\n\t\tfunctions: ChatCompletionTool[],\n\t): Promise<LLMStream> {\n\t\tconst stream = await this.client.chat.completions.create({\n\t\t\tmodel: this.modelName,\n\t\t\tmessages,\n\t\t\ttools: functions,\n\t\t\ttool_choice: \"auto\",\n\t\t\tstream: true,\n\t\t});\n\t\treturn this.createOpenAIStreamAdapter(stream);\n\t}\n\n\t// NOTE(yoojin): Need to switch API Stream type to LLMStream.\n\tprivate createOpenAIStreamAdapter(\n\t\topenaiStream: AsyncIterable<ChatCompletionChunk>,\n\t): LLMStream {\n\t\treturn {\n\t\t\tasync *[Symbol.asyncIterator](): AsyncIterator<StreamChunk> {\n\t\t\t\tfor await (const openaiChunk of openaiStream) {\n\t\t\t\t\tconst choice = openaiChunk.choices[0];\n\t\t\t\t\tif (choice) {\n\t\t\t\t\t\tconst streamChunk: StreamChunk = {\n\t\t\t\t\t\t\tdelta: {\n\t\t\t\t\t\t\t\trole: choice.delta.role,\n\t\t\t\t\t\t\t\tcontent: choice.delta.content || undefined,\n\t\t\t\t\t\t\t\ttool_calls: choice.delta.tool_calls?.map(\n\t\t\t\t\t\t\t\t\t(tc) =>\n\t\t\t\t\t\t\t\t\t\t({\n\t\t\t\t\t\t\t\t\t\t\tindex: tc.index,\n\t\t\t\t\t\t\t\t\t\t\tid: tc.id,\n\t\t\t\t\t\t\t\t\t\t\ttype: tc.type,\n\t\t\t\t\t\t\t\t\t\t\tfunction: tc.function,\n\t\t\t\t\t\t\t\t\t\t}) as ToolCallDelta,\n\t\t\t\t\t\t\t\t),\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tfinish_reason: choice.finish_reason as any,\n\t\t\t\t\t\t\tmetadata: {\n\t\t\t\t\t\t\t\tprovider: \"openai\",\n\t\t\t\t\t\t\t\tmodel: openaiChunk.model,\n\t\t\t\t\t\t\t\tid: openaiChunk.id,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t};\n\t\t\t\t\t\tyield streamChunk;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t},\n\t\t\tmetadata: { provider: \"openai\" },\n\t\t};\n\t}\n\n\tconvertToolsToFunctions(tools: IAgentTool[]): ChatCompletionTool[] {\n\t\tconst functions: 
ChatCompletionTool[] = [];\n\t\tfor (const tool of tools) {\n\t\t\tif (!tool.enabled) {\n\t\t\t\tcontinue;\n\t\t\t}\n\t\t\tif (tool.protocol === TOOL_PROTOCOL_TYPE.MCP) {\n\t\t\t\tconst { mcpTool, id } = tool as IMCPTool;\n\t\t\t\tfunctions.push({\n\t\t\t\t\ttype: \"function\",\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: id,\n\t\t\t\t\t\tdescription: mcpTool.description,\n\t\t\t\t\t\tparameters: mcpTool.inputSchema,\n\t\t\t\t\t},\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\t// PROTOCOL_TYPE.A2A\n\t\t\t\tconst { id, card } = tool as IA2ATool;\n\t\t\t\tfunctions.push({\n\t\t\t\t\ttype: \"function\",\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: id,\n\t\t\t\t\t\tdescription: card.description,\n\t\t\t\t\t},\n\t\t\t\t});\n\t\t\t}\n\t\t}\n\t\treturn functions;\n\t}\n}\n"],"mappings":";AAAA,SAAS,iBAAiB;AAC1B,SAAS,gBAAoC;AAa7C,SAAS,0BAA0B;AACnC,SAAS,eAAe,yBAAyB;AAQ1C,IAAM,cAAN,cAA0B,UAA8C;AAAA,EACtE;AAAA,EACA;AAAA,EAER,YACC,SACA,QACA,YACA,WACC;AACD,UAAM;AACN,SAAK,SAAS,IAAI,kBAAkB;AAAA,MACnC,SAAS;AAAA,MACT;AAAA,MACA;AAAA,IACD,CAAC;AACD,SAAK,YAAY;AAAA,EAClB;AAAA,EAEQ,eAAe,MAAgB;AACtC,YAAQ,MAAM;AAAA,MACb,KAAK,SAAS;AACb,eAAO;AAAA,MACR,KAAK,SAAS;AAAA,MACd,KAAK,SAAS;AACb,eAAO;AAAA,MACR;AACC,eAAO;AAAA,IACT;AAAA,EACD;AAAA,EAEA,iBAAiB,QAII;AACpB,UAAM,EAAE,OAAO,gBAAgB,aAAa,IAAI;AAChD,UAAM,WAA6B,CAAC,eACjC,CAAC,IACD,CAAC,EAAE,MAAM,UAAU,SAAS,aAAa,KAAK,EAAE,CAAC;AACpD,UAAM,iBAAmC,CAAC,iBACvC,CAAC,IACD,OAAO,KAAK,eAAe,KAAK,EAAE,IAAI,CAAC,WAAmB;AAC1D,YAAM,OAAO,eAAe,MAAM,MAAM;AACxC,aAAO;AAAA,QACN,MAAM,KAAK,eAAe,KAAK,IAAI;AAAA,QACnC,SAAS,KAAK,QAAQ,MAAM,CAAC;AAAA,MAC9B;AAAA,IACD,CAAC;AACH,UAAM,cAA8B,EAAE,MAAM,QAAQ,SAAS,MAAM;AACnE,WAAO,SAAS,OAAO,cAAc,EAAE,OAAO,WAAW;AAAA,EAC1D;AAAA,EAEA,eAAe,UAA4B,SAAuB;AACjE,aAAS,KAAK;AAAA,MACb,MAAM;AAAA,MACN,SAAS;AAAA,IACV,CAAC;AAAA,EACF;AAAA,EAEA,MAAM,MAAM,UAAoD;AAC/D,UAAM,WAAW,MAAM,KAAK,OAAO,KAAK,YAAY,OAAO;AAAA,MAC1D,OAAO,KAAK;AAAA,MACZ;AAAA,IACD,CAAC;AAED,WAAO;AAAA,MACN,SAAS,SAAS,QAAQ,CAAC,EAAE,QAAQ,WAAW;AAAA,IACjD;AAAA,EACD;AAAA,EAEA,MAAM,wBACL,UACA,WACyB;AACzB,QAAI,UAAU,SAAS,GAAG;AACzB,YAAM,WAAW,MAAM,KAAK,OAAO,KAAK,YAAY,OAAO;AAAA,QAC1D,OAAO,KAAK;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,aAAa;AAAA,MACd,CAAC;AAED,YAAM,EAAE,SAAS,WAAW,IAAI,SAAS,QAAQ,CAAC,EAAE;AAEpD,YAAM,YAAoC,YAAY;AAAA,QACrD,CAAC,UAAyC;AACzC,iBAAO;AAAA,YACN,MAAM,MAAM,SAAS;AAAA;AAAA,YAErB,WAAW,KAAK,MAAM,MAAM,SAAS,SAAS;AAAA,UAC/C;AAAA,QACD;AAAA,MACD;AAEA,aAAO;AAAA,QACN,SAAS,WAAW;AAAA,QACpB;AAAA,MACD;AAAA,IACD;AACA,WAAO,MAAM,KAAK,MAAM,QAAQ;AAAA,EACjC;AAAA,EAEA,MAAM,8BACL,UACA,WACqB;AACrB,UAAM,SAAS,MAAM,KAAK,OAAO,KAAK,YAAY,OAAO;AAAA,MACxD,OAAO,KAAK;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA,MACb,QAAQ;AAAA,IACT,CAAC;AACD,WAAO,KAAK,0BAA0B,MAAM;AAAA,EAC7C;AAAA;AAAA,EAGQ,0BACP,cACY;AACZ,WAAO;AAAA,MACN,QAAQ,OAAO,aAAa,IAAgC;AAC3D,yBAAiB,eAAe,cAAc;AAC7C,gBAAM,SAAS,YAAY,QAAQ,CAAC;AACpC,cAAI,QAAQ;AACX,kBAAM,cAA2B;AAAA,cAChC,OAAO;AAAA,gBACN,MAAM,OAAO,MAAM;AAAA,gBACnB,SAAS,OAAO,MAAM,WAAW;AAAA,gBACjC,YAAY,OAAO,MAAM,YAAY;AAAA,kBACpC,CAAC,QACC;AAAA,oBACA,OAAO,GAAG;AAAA,oBACV,IAAI,GAAG;AAAA,oBACP,MAAM,GAAG;AAAA,oBACT,UAAU,GAAG;AAAA,kBACd;AAAA,gBACF;AAAA,cACD;AAAA,cACA,eAAe,OAAO;AAAA,cACtB,UAAU;AAAA,gBACT,UAAU;AAAA,gBACV,OAAO,YAAY;AAAA,gBACnB,IAAI,YAAY;AAAA,cACjB;AAAA,YACD;AACA,kBAAM;AAAA,UACP;AAAA,QACD;AAAA,MACD;AAAA,MACA,UAAU,EAAE,UAAU,SAAS;AAAA,IAChC;AAAA,EACD;AAAA,EAEA,wBAAwB,OAA2C;AAClE,UAAM,YAAkC,CAAC;AACzC,eAAW,QAAQ,OAAO;AACzB,UAAI,CAAC,KAAK,SAAS;AAClB;AAAA,MACD;AACA,UAAI,KAAK,aAAa,mBAAmB,KAAK;AAC7C,cAAM,EAAE,SAAS,GAAG,IAAI;AACxB,kBAAU,KAAK;AAAA,UACd,MAAM;AAAA,UACN,UAAU;AAAA,YACT,MAAM;AAAA,YACN,aAAa,QAAQ;AAAA,YACrB,YAAY,QAAQ;AAAA,UACrB;AAAA,QACD,
CAAC;AAAA,MACF,OAAO;AAEN,cAAM,EAAE,IAAI,KAAK,IAAI;AACrB,kBAAU,KAAK;AAAA,UACd,MAAM;AAAA,UACN,UAAU;AAAA,YACT,MAAM;AAAA,YACN,aAAa,KAAK;AAAA,UACnB;AAAA,QACD,CAAC;AAAA,MACF;AAAA,IACD;AACA,WAAO;AAAA,EACR;AACD;","names":[]}
package/index.ts
CHANGED
@@ -127,7 +127,7 @@ export class AzureOpenAI extends BaseModel<CCMessageParam, ChatCompletionTool> {
   async fetchStreamWithContextMessage(
     messages: CCMessageParam[],
     functions: ChatCompletionTool[],
-  ) {
+  ): Promise<LLMStream> {
     const stream = await this.client.chat.completions.create({
       model: this.modelName,
       messages,
@@ -135,7 +135,7 @@ export class AzureOpenAI extends BaseModel<CCMessageParam, ChatCompletionTool> {
       tool_choice: "auto",
       stream: true,
     });
-    return
+    return this.createOpenAIStreamAdapter(stream);
   }

   // NOTE(yoojin): Need to switch API Stream type to LLMStream.

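For reference, the sketch below shows one way the updated fetchStreamWithContextMessage might be consumed once it resolves to an LLMStream. It is a hypothetical usage example, not part of the package: the endpoint URL, API version, deployment name, and environment variable name are placeholder assumptions, and the import path assumes the package root exports the AzureOpenAI class.

import { AzureOpenAI } from "@ainetwork/adk-provider-model-azure";

async function main() {
  // Placeholder Azure OpenAI settings; substitute the values for your own resource.
  const model = new AzureOpenAI(
    "https://<your-resource>.openai.azure.com/", // baseUrl (assumed)
    process.env.AZURE_OPENAI_API_KEY ?? "",      // apiKey
    "2024-06-01",                                // apiVersion (assumed)
    "gpt-4o-mini",                               // modelName / deployment (assumed)
  );

  // Build the chat history for a single user query.
  const messages = model.generateMessages({
    query: "Give me a one-sentence status summary.",
    systemPrompt: "You are a helpful assistant.",
  });

  // With this release the method returns the LLMStream adapter,
  // so the chunks can be consumed directly with for-await.
  const stream = await model.fetchStreamWithContextMessage(messages, []);
  for await (const chunk of stream) {
    if (chunk.delta.content) {
      process.stdout.write(chunk.delta.content);
    }
  }
}

main().catch(console.error);
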
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@ainetwork/adk-provider-model-azure",
-  "version": "0.1.4",
+  "version": "0.1.5",
   "author": "AI Network (https://ainetwork.ai)",
   "type": "module",
   "engines": {
@@ -21,7 +21,7 @@
     "clean": "rm -rf dist"
   },
   "dependencies": {
-    "@ainetwork/adk": "^0.1.
+    "@ainetwork/adk": "^0.1.9",
     "openai": "^5.10.2"
   },
   "devDependencies": {
@@ -31,5 +31,5 @@
   "publishConfig": {
     "access": "public"
   },
-  "gitHead": "
+  "gitHead": "66a5fbc5f9a509bd412ff8dc91b002862133fbae"
 }