@ainetwork/adk-provider-model-azure 0.1.3 → 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +44 -0
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +3 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.js +44 -0
- package/dist/index.js.map +1 -1
- package/package.json +2 -2
package/dist/index.cjs
CHANGED
@@ -103,6 +103,50 @@ var AzureOpenAI = class extends import_modules.BaseModel {
     }
     return await this.fetch(messages);
   }
+  async fetchStreamWithContextMessage(messages, functions) {
+    const stream = await this.client.chat.completions.create({
+      model: this.modelName,
+      messages,
+      tools: functions,
+      tool_choice: "auto",
+      stream: true
+    });
+    return await this.createOpenAIStreamAdapter(stream);
+  }
+  // NOTE(yoojin): Need to switch API Stream type to LLMStream.
+  createOpenAIStreamAdapter(openaiStream) {
+    return {
+      async *[Symbol.asyncIterator]() {
+        for await (const openaiChunk of openaiStream) {
+          const choice = openaiChunk.choices[0];
+          if (choice) {
+            const streamChunk = {
+              delta: {
+                role: choice.delta.role,
+                content: choice.delta.content || void 0,
+                tool_calls: choice.delta.tool_calls?.map(
+                  (tc) => ({
+                    index: tc.index,
+                    id: tc.id,
+                    type: tc.type,
+                    function: tc.function
+                  })
+                )
+              },
+              finish_reason: choice.finish_reason,
+              metadata: {
+                provider: "openai",
+                model: openaiChunk.model,
+                id: openaiChunk.id
+              }
+            };
+            yield streamChunk;
+          }
+        }
+      },
+      metadata: { provider: "openai" }
+    };
+  }
   convertToolsToFunctions(tools) {
     const functions = [];
     for (const tool of tools) {
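For orientation, a minimal sketch of how a caller might consume the stream that the new fetchStreamWithContextMessage returns. The constructor signature, generateMessages, and the chunk fields (delta.content, delta.tool_calls, finish_reason) follow the diff above; the endpoint, API version, deployment name, and the get_weather tool are illustrative assumptions, not part of the package.

import { AzureOpenAI } from "@ainetwork/adk-provider-model-azure";

// Hypothetical endpoint, key, API version, and deployment name for illustration only.
const model = new AzureOpenAI(
  "https://example-resource.openai.azure.com/openai",
  process.env.AZURE_OPENAI_API_KEY ?? "",
  "2024-06-01",
  "gpt-4o",
);

// A hypothetical tool definition in the ChatCompletionTool shape the method expects.
const tools = [
  {
    type: "function" as const,
    function: {
      name: "get_weather",
      description: "Look up the current weather for a city",
      parameters: {
        type: "object",
        properties: { city: { type: "string" } },
        required: ["city"],
      },
    },
  },
];

async function main() {
  const messages = model.generateMessages({ query: "What's the weather in Seoul?" });
  const stream = await model.fetchStreamWithContextMessage(messages, tools);

  let text = "";
  for await (const chunk of stream) {
    // delta.tool_calls, when present, carries partial tool-call arguments that a real
    // caller would accumulate by index before JSON-parsing them.
    if (chunk.delta.content) text += chunk.delta.content;
    if (chunk.finish_reason) break;
  }
  console.log(text);
}

main().catch(console.error);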
package/dist/index.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../index.ts"],"sourcesContent":["import { BaseModel } from \"@ainetwork/adk/modules\";\nimport { ChatRole, type SessionObject } from \"@ainetwork/adk/types/memory\";\nimport type {\n\tFetchResponse,\n\tIA2ATool,\n\tIAgentTool,\n\tIMCPTool,\n\tToolCall,\n} from \"@ainetwork/adk/types/tool\";\nimport { TOOL_PROTOCOL_TYPE } from \"@ainetwork/adk/types/tool\";\nimport { AzureOpenAI as AzureOpenAIClient } from \"openai\";\nimport type {\n\tChatCompletionMessageParam as CCMessageParam,\n\tChatCompletionMessageToolCall,\n\tChatCompletionTool,\n} from \"openai/resources\";\n\nexport class AzureOpenAI extends BaseModel<CCMessageParam, ChatCompletionTool> {\n\tprivate client: AzureOpenAIClient;\n\tprivate modelName: string;\n\n\tconstructor(\n\t\tbaseUrl: string,\n\t\tapiKey: string,\n\t\tapiVersion: string,\n\t\tmodelName: string,\n\t) {\n\t\tsuper();\n\t\tthis.client = new AzureOpenAIClient({\n\t\t\tbaseURL: baseUrl,\n\t\t\tapiKey: apiKey,\n\t\t\tapiVersion: apiVersion,\n\t\t});\n\t\tthis.modelName = modelName;\n\t}\n\n\tprivate getMessageRole(role: ChatRole) {\n\t\tswitch (role) {\n\t\t\tcase ChatRole.USER:\n\t\t\t\treturn \"user\";\n\t\t\tcase ChatRole.MODEL:\n\t\t\tcase ChatRole.SYSTEM:\n\t\t\t\treturn \"system\";\n\t\t\tdefault:\n\t\t\t\treturn \"system\"; /*FIXME*/\n\t\t}\n\t}\n\n\tgenerateMessages(params: {\n\t\tquery: string;\n\t\tsessionHistory?: SessionObject;\n\t\tsystemPrompt?: string;\n\t}): CCMessageParam[] {\n\t\tconst { query, sessionHistory, systemPrompt } = params;\n\t\tconst messages: CCMessageParam[] = !systemPrompt\n\t\t\t? []\n\t\t\t: [{ role: \"system\", content: systemPrompt.trim() }];\n\t\tconst sessionContent: CCMessageParam[] = !sessionHistory\n\t\t\t? []\n\t\t\t: Object.keys(sessionHistory.chats).map((chatId: string) => {\n\t\t\t\t\tconst chat = sessionHistory.chats[chatId];\n\t\t\t\t\treturn {\n\t\t\t\t\t\trole: this.getMessageRole(chat.role),\n\t\t\t\t\t\tcontent: chat.content.parts[0],\n\t\t\t\t\t};\n\t\t\t\t});\n\t\tconst userContent: CCMessageParam = { role: \"user\", content: query };\n\t\treturn messages.concat(sessionContent).concat(userContent);\n\t}\n\n\tappendMessages(messages: CCMessageParam[], message: string): void {\n\t\tmessages.push({\n\t\t\trole: \"user\",\n\t\t\tcontent: message,\n\t\t});\n\t}\n\n\tasync fetch(messages: CCMessageParam[]): Promise<FetchResponse> {\n\t\tconst response = await this.client.chat.completions.create({\n\t\t\tmodel: this.modelName,\n\t\t\tmessages,\n\t\t});\n\n\t\treturn {\n\t\t\tcontent: response.choices[0].message.content || undefined,\n\t\t};\n\t}\n\n\tasync fetchWithContextMessage(\n\t\tmessages: CCMessageParam[],\n\t\tfunctions: ChatCompletionTool[],\n\t): Promise<FetchResponse> {\n\t\tif (functions.length > 0) {\n\t\t\tconst response = await this.client.chat.completions.create({\n\t\t\t\tmodel: this.modelName,\n\t\t\t\tmessages,\n\t\t\t\ttools: functions,\n\t\t\t\ttool_choice: \"auto\",\n\t\t\t});\n\n\t\t\tconst { content, tool_calls } = response.choices[0].message;\n\n\t\t\tconst toolCalls: ToolCall[] | undefined = tool_calls?.map(\n\t\t\t\t(value: ChatCompletionMessageToolCall) => {\n\t\t\t\t\treturn {\n\t\t\t\t\t\tname: value.function.name,\n\t\t\t\t\t\t// FIXME: value.function.arguments could not be a valid JSON\n\t\t\t\t\t\targuments: JSON.parse(value.function.arguments),\n\t\t\t\t\t};\n\t\t\t\t},\n\t\t\t);\n\n\t\t\treturn {\n\t\t\t\tcontent: content || undefined,\n\t\t\t\ttoolCalls,\n\t\t\t};\n\t\t}\n\t\treturn await this.fetch(messages);\n\t}\n\n\tconvertToolsToFunctions(tools: 
IAgentTool[]): ChatCompletionTool[] {\n\t\tconst functions: ChatCompletionTool[] = [];\n\t\tfor (const tool of tools) {\n\t\t\tif (!tool.enabled) {\n\t\t\t\tcontinue;\n\t\t\t}\n\t\t\tif (tool.protocol === TOOL_PROTOCOL_TYPE.MCP) {\n\t\t\t\tconst { mcpTool, id } = tool as IMCPTool;\n\t\t\t\tfunctions.push({\n\t\t\t\t\ttype: \"function\",\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: id,\n\t\t\t\t\t\tdescription: mcpTool.description,\n\t\t\t\t\t\tparameters: mcpTool.inputSchema,\n\t\t\t\t\t},\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\t// PROTOCOL_TYPE.A2A\n\t\t\t\tconst { id, card } = tool as IA2ATool;\n\t\t\t\tfunctions.push({\n\t\t\t\t\ttype: \"function\",\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: id,\n\t\t\t\t\t\tdescription: card.description,\n\t\t\t\t\t},\n\t\t\t\t});\n\t\t\t}\n\t\t}\n\t\treturn functions;\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAA0B;AAC1B,oBAA6C;
+
{"version":3,"sources":["../index.ts"],"sourcesContent":["import { BaseModel } from \"@ainetwork/adk/modules\";\nimport { ChatRole, type SessionObject } from \"@ainetwork/adk/types/memory\";\nimport type {\n\tLLMStream,\n\tStreamChunk,\n\tToolCallDelta,\n} from \"@ainetwork/adk/types/stream\";\nimport type {\n\tFetchResponse,\n\tIA2ATool,\n\tIAgentTool,\n\tIMCPTool,\n\tToolCall,\n} from \"@ainetwork/adk/types/tool\";\nimport { TOOL_PROTOCOL_TYPE } from \"@ainetwork/adk/types/tool\";\nimport { AzureOpenAI as AzureOpenAIClient } from \"openai\";\nimport type {\n\tChatCompletionMessageParam as CCMessageParam,\n\tChatCompletionChunk,\n\tChatCompletionMessageToolCall,\n\tChatCompletionTool,\n} from \"openai/resources\";\n\nexport class AzureOpenAI extends BaseModel<CCMessageParam, ChatCompletionTool> {\n\tprivate client: AzureOpenAIClient;\n\tprivate modelName: string;\n\n\tconstructor(\n\t\tbaseUrl: string,\n\t\tapiKey: string,\n\t\tapiVersion: string,\n\t\tmodelName: string,\n\t) {\n\t\tsuper();\n\t\tthis.client = new AzureOpenAIClient({\n\t\t\tbaseURL: baseUrl,\n\t\t\tapiKey: apiKey,\n\t\t\tapiVersion: apiVersion,\n\t\t});\n\t\tthis.modelName = modelName;\n\t}\n\n\tprivate getMessageRole(role: ChatRole) {\n\t\tswitch (role) {\n\t\t\tcase ChatRole.USER:\n\t\t\t\treturn \"user\";\n\t\t\tcase ChatRole.MODEL:\n\t\t\tcase ChatRole.SYSTEM:\n\t\t\t\treturn \"system\";\n\t\t\tdefault:\n\t\t\t\treturn \"system\"; /*FIXME*/\n\t\t}\n\t}\n\n\tgenerateMessages(params: {\n\t\tquery: string;\n\t\tsessionHistory?: SessionObject;\n\t\tsystemPrompt?: string;\n\t}): CCMessageParam[] {\n\t\tconst { query, sessionHistory, systemPrompt } = params;\n\t\tconst messages: CCMessageParam[] = !systemPrompt\n\t\t\t? []\n\t\t\t: [{ role: \"system\", content: systemPrompt.trim() }];\n\t\tconst sessionContent: CCMessageParam[] = !sessionHistory\n\t\t\t? 
[]\n\t\t\t: Object.keys(sessionHistory.chats).map((chatId: string) => {\n\t\t\t\t\tconst chat = sessionHistory.chats[chatId];\n\t\t\t\t\treturn {\n\t\t\t\t\t\trole: this.getMessageRole(chat.role),\n\t\t\t\t\t\tcontent: chat.content.parts[0],\n\t\t\t\t\t};\n\t\t\t\t});\n\t\tconst userContent: CCMessageParam = { role: \"user\", content: query };\n\t\treturn messages.concat(sessionContent).concat(userContent);\n\t}\n\n\tappendMessages(messages: CCMessageParam[], message: string): void {\n\t\tmessages.push({\n\t\t\trole: \"user\",\n\t\t\tcontent: message,\n\t\t});\n\t}\n\n\tasync fetch(messages: CCMessageParam[]): Promise<FetchResponse> {\n\t\tconst response = await this.client.chat.completions.create({\n\t\t\tmodel: this.modelName,\n\t\t\tmessages,\n\t\t});\n\n\t\treturn {\n\t\t\tcontent: response.choices[0].message.content || undefined,\n\t\t};\n\t}\n\n\tasync fetchWithContextMessage(\n\t\tmessages: CCMessageParam[],\n\t\tfunctions: ChatCompletionTool[],\n\t): Promise<FetchResponse> {\n\t\tif (functions.length > 0) {\n\t\t\tconst response = await this.client.chat.completions.create({\n\t\t\t\tmodel: this.modelName,\n\t\t\t\tmessages,\n\t\t\t\ttools: functions,\n\t\t\t\ttool_choice: \"auto\",\n\t\t\t});\n\n\t\t\tconst { content, tool_calls } = response.choices[0].message;\n\n\t\t\tconst toolCalls: ToolCall[] | undefined = tool_calls?.map(\n\t\t\t\t(value: ChatCompletionMessageToolCall) => {\n\t\t\t\t\treturn {\n\t\t\t\t\t\tname: value.function.name,\n\t\t\t\t\t\t// FIXME: value.function.arguments could not be a valid JSON\n\t\t\t\t\t\targuments: JSON.parse(value.function.arguments),\n\t\t\t\t\t};\n\t\t\t\t},\n\t\t\t);\n\n\t\t\treturn {\n\t\t\t\tcontent: content || undefined,\n\t\t\t\ttoolCalls,\n\t\t\t};\n\t\t}\n\t\treturn await this.fetch(messages);\n\t}\n\n\tasync fetchStreamWithContextMessage(\n\t\tmessages: CCMessageParam[],\n\t\tfunctions: ChatCompletionTool[],\n\t) {\n\t\tconst stream = await this.client.chat.completions.create({\n\t\t\tmodel: this.modelName,\n\t\t\tmessages,\n\t\t\ttools: functions,\n\t\t\ttool_choice: \"auto\",\n\t\t\tstream: true,\n\t\t});\n\t\treturn await this.createOpenAIStreamAdapter(stream);\n\t}\n\n\t// NOTE(yoojin): Need to switch API Stream type to LLMStream.\n\tprivate createOpenAIStreamAdapter(\n\t\topenaiStream: AsyncIterable<ChatCompletionChunk>,\n\t): LLMStream {\n\t\treturn {\n\t\t\tasync *[Symbol.asyncIterator](): AsyncIterator<StreamChunk> {\n\t\t\t\tfor await (const openaiChunk of openaiStream) {\n\t\t\t\t\tconst choice = openaiChunk.choices[0];\n\t\t\t\t\tif (choice) {\n\t\t\t\t\t\tconst streamChunk: StreamChunk = {\n\t\t\t\t\t\t\tdelta: {\n\t\t\t\t\t\t\t\trole: choice.delta.role,\n\t\t\t\t\t\t\t\tcontent: choice.delta.content || undefined,\n\t\t\t\t\t\t\t\ttool_calls: choice.delta.tool_calls?.map(\n\t\t\t\t\t\t\t\t\t(tc) =>\n\t\t\t\t\t\t\t\t\t\t({\n\t\t\t\t\t\t\t\t\t\t\tindex: tc.index,\n\t\t\t\t\t\t\t\t\t\t\tid: tc.id,\n\t\t\t\t\t\t\t\t\t\t\ttype: tc.type,\n\t\t\t\t\t\t\t\t\t\t\tfunction: tc.function,\n\t\t\t\t\t\t\t\t\t\t}) as ToolCallDelta,\n\t\t\t\t\t\t\t\t),\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tfinish_reason: choice.finish_reason as any,\n\t\t\t\t\t\t\tmetadata: {\n\t\t\t\t\t\t\t\tprovider: \"openai\",\n\t\t\t\t\t\t\t\tmodel: openaiChunk.model,\n\t\t\t\t\t\t\t\tid: openaiChunk.id,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t};\n\t\t\t\t\t\tyield streamChunk;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t},\n\t\t\tmetadata: { provider: \"openai\" },\n\t\t};\n\t}\n\n\tconvertToolsToFunctions(tools: IAgentTool[]): ChatCompletionTool[] {\n\t\tconst functions: 
ChatCompletionTool[] = [];\n\t\tfor (const tool of tools) {\n\t\t\tif (!tool.enabled) {\n\t\t\t\tcontinue;\n\t\t\t}\n\t\t\tif (tool.protocol === TOOL_PROTOCOL_TYPE.MCP) {\n\t\t\t\tconst { mcpTool, id } = tool as IMCPTool;\n\t\t\t\tfunctions.push({\n\t\t\t\t\ttype: \"function\",\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: id,\n\t\t\t\t\t\tdescription: mcpTool.description,\n\t\t\t\t\t\tparameters: mcpTool.inputSchema,\n\t\t\t\t\t},\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\t// PROTOCOL_TYPE.A2A\n\t\t\t\tconst { id, card } = tool as IA2ATool;\n\t\t\t\tfunctions.push({\n\t\t\t\t\ttype: \"function\",\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: id,\n\t\t\t\t\t\tdescription: card.description,\n\t\t\t\t\t},\n\t\t\t\t});\n\t\t\t}\n\t\t}\n\t\treturn functions;\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAA0B;AAC1B,oBAA6C;AAa7C,kBAAmC;AACnC,oBAAiD;AAQ1C,IAAM,cAAN,cAA0B,yBAA8C;AAAA,EACtE;AAAA,EACA;AAAA,EAER,YACC,SACA,QACA,YACA,WACC;AACD,UAAM;AACN,SAAK,SAAS,IAAI,cAAAA,YAAkB;AAAA,MACnC,SAAS;AAAA,MACT;AAAA,MACA;AAAA,IACD,CAAC;AACD,SAAK,YAAY;AAAA,EAClB;AAAA,EAEQ,eAAe,MAAgB;AACtC,YAAQ,MAAM;AAAA,MACb,KAAK,uBAAS;AACb,eAAO;AAAA,MACR,KAAK,uBAAS;AAAA,MACd,KAAK,uBAAS;AACb,eAAO;AAAA,MACR;AACC,eAAO;AAAA,IACT;AAAA,EACD;AAAA,EAEA,iBAAiB,QAII;AACpB,UAAM,EAAE,OAAO,gBAAgB,aAAa,IAAI;AAChD,UAAM,WAA6B,CAAC,eACjC,CAAC,IACD,CAAC,EAAE,MAAM,UAAU,SAAS,aAAa,KAAK,EAAE,CAAC;AACpD,UAAM,iBAAmC,CAAC,iBACvC,CAAC,IACD,OAAO,KAAK,eAAe,KAAK,EAAE,IAAI,CAAC,WAAmB;AAC1D,YAAM,OAAO,eAAe,MAAM,MAAM;AACxC,aAAO;AAAA,QACN,MAAM,KAAK,eAAe,KAAK,IAAI;AAAA,QACnC,SAAS,KAAK,QAAQ,MAAM,CAAC;AAAA,MAC9B;AAAA,IACD,CAAC;AACH,UAAM,cAA8B,EAAE,MAAM,QAAQ,SAAS,MAAM;AACnE,WAAO,SAAS,OAAO,cAAc,EAAE,OAAO,WAAW;AAAA,EAC1D;AAAA,EAEA,eAAe,UAA4B,SAAuB;AACjE,aAAS,KAAK;AAAA,MACb,MAAM;AAAA,MACN,SAAS;AAAA,IACV,CAAC;AAAA,EACF;AAAA,EAEA,MAAM,MAAM,UAAoD;AAC/D,UAAM,WAAW,MAAM,KAAK,OAAO,KAAK,YAAY,OAAO;AAAA,MAC1D,OAAO,KAAK;AAAA,MACZ;AAAA,IACD,CAAC;AAED,WAAO;AAAA,MACN,SAAS,SAAS,QAAQ,CAAC,EAAE,QAAQ,WAAW;AAAA,IACjD;AAAA,EACD;AAAA,EAEA,MAAM,wBACL,UACA,WACyB;AACzB,QAAI,UAAU,SAAS,GAAG;AACzB,YAAM,WAAW,MAAM,KAAK,OAAO,KAAK,YAAY,OAAO;AAAA,QAC1D,OAAO,KAAK;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,aAAa;AAAA,MACd,CAAC;AAED,YAAM,EAAE,SAAS,WAAW,IAAI,SAAS,QAAQ,CAAC,EAAE;AAEpD,YAAM,YAAoC,YAAY;AAAA,QACrD,CAAC,UAAyC;AACzC,iBAAO;AAAA,YACN,MAAM,MAAM,SAAS;AAAA;AAAA,YAErB,WAAW,KAAK,MAAM,MAAM,SAAS,SAAS;AAAA,UAC/C;AAAA,QACD;AAAA,MACD;AAEA,aAAO;AAAA,QACN,SAAS,WAAW;AAAA,QACpB;AAAA,MACD;AAAA,IACD;AACA,WAAO,MAAM,KAAK,MAAM,QAAQ;AAAA,EACjC;AAAA,EAEA,MAAM,8BACL,UACA,WACC;AACD,UAAM,SAAS,MAAM,KAAK,OAAO,KAAK,YAAY,OAAO;AAAA,MACxD,OAAO,KAAK;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA,MACb,QAAQ;AAAA,IACT,CAAC;AACD,WAAO,MAAM,KAAK,0BAA0B,MAAM;AAAA,EACnD;AAAA;AAAA,EAGQ,0BACP,cACY;AACZ,WAAO;AAAA,MACN,QAAQ,OAAO,aAAa,IAAgC;AAC3D,yBAAiB,eAAe,cAAc;AAC7C,gBAAM,SAAS,YAAY,QAAQ,CAAC;AACpC,cAAI,QAAQ;AACX,kBAAM,cAA2B;AAAA,cAChC,OAAO;AAAA,gBACN,MAAM,OAAO,MAAM;AAAA,gBACnB,SAAS,OAAO,MAAM,WAAW;AAAA,gBACjC,YAAY,OAAO,MAAM,YAAY;AAAA,kBACpC,CAAC,QACC;AAAA,oBACA,OAAO,GAAG;AAAA,oBACV,IAAI,GAAG;AAAA,oBACP,MAAM,GAAG;AAAA,oBACT,UAAU,GAAG;AAAA,kBACd;AAAA,gBACF;AAAA,cACD;AAAA,cACA,eAAe,OAAO;AAAA,cACtB,UAAU;AAAA,gBACT,UAAU;AAAA,gBACV,OAAO,YAAY;AAAA,gBACnB,IAAI,YAAY;AAAA,cACjB;AAAA,YACD;AACA,kBAAM;AAAA,UACP;AAAA,QACD;AAAA,MACD;AAAA,MACA,UAAU,EAAE,UAAU,SAAS;AAAA,IAChC;AAAA,EACD;AAAA,EAEA,wBAAwB,OAA2C;AAClE,UAAM,YAAkC,CAAC;AACzC,eAAW,QAAQ,OAAO;AACzB,UAAI,CAAC,KAAK,SAAS;AAClB;AAAA,MACD;AACA,UAAI,KAAK,aAAa,+BAAmB,KAAK;AAC7C,cAAM,EAAE,SAAS,GAAG,IAAI;AACxB,kBAAU,KAAK;AAAA,UACd,MAAM;AAAA,UACN,UAAU;AAAA,YACT,MAAM;AAAA,YACN,aAAa,QAAQ;AAAA,YACrB,
YAAY,QAAQ;AAAA,UACrB;AAAA,QACD,CAAC;AAAA,MACF,OAAO;AAEN,cAAM,EAAE,IAAI,KAAK,IAAI;AACrB,kBAAU,KAAK;AAAA,UACd,MAAM;AAAA,UACN,UAAU;AAAA,YACT,MAAM;AAAA,YACN,aAAa,KAAK;AAAA,UACnB;AAAA,QACD,CAAC;AAAA,MACF;AAAA,IACD;AACA,WAAO;AAAA,EACR;AACD;","names":["AzureOpenAIClient"]}
package/dist/index.d.cts
CHANGED
@@ -1,5 +1,6 @@
 import { BaseModel } from '@ainetwork/adk/modules';
 import { SessionObject } from '@ainetwork/adk/types/memory';
+import { LLMStream } from '@ainetwork/adk/types/stream';
 import { FetchResponse, IAgentTool } from '@ainetwork/adk/types/tool';
 import { ChatCompletionMessageParam, ChatCompletionTool } from 'openai/resources';
 
@@ -16,6 +17,8 @@ declare class AzureOpenAI extends BaseModel<ChatCompletionMessageParam, ChatComp
     appendMessages(messages: ChatCompletionMessageParam[], message: string): void;
     fetch(messages: ChatCompletionMessageParam[]): Promise<FetchResponse>;
     fetchWithContextMessage(messages: ChatCompletionMessageParam[], functions: ChatCompletionTool[]): Promise<FetchResponse>;
+    fetchStreamWithContextMessage(messages: ChatCompletionMessageParam[], functions: ChatCompletionTool[]): Promise<LLMStream>;
+    private createOpenAIStreamAdapter;
     convertToolsToFunctions(tools: IAgentTool[]): ChatCompletionTool[];
 }
 
package/dist/index.d.ts
CHANGED
@@ -1,5 +1,6 @@
 import { BaseModel } from '@ainetwork/adk/modules';
 import { SessionObject } from '@ainetwork/adk/types/memory';
+import { LLMStream } from '@ainetwork/adk/types/stream';
 import { FetchResponse, IAgentTool } from '@ainetwork/adk/types/tool';
 import { ChatCompletionMessageParam, ChatCompletionTool } from 'openai/resources';
 
@@ -16,6 +17,8 @@ declare class AzureOpenAI extends BaseModel<ChatCompletionMessageParam, ChatComp
     appendMessages(messages: ChatCompletionMessageParam[], message: string): void;
     fetch(messages: ChatCompletionMessageParam[]): Promise<FetchResponse>;
     fetchWithContextMessage(messages: ChatCompletionMessageParam[], functions: ChatCompletionTool[]): Promise<FetchResponse>;
+    fetchStreamWithContextMessage(messages: ChatCompletionMessageParam[], functions: ChatCompletionTool[]): Promise<LLMStream>;
+    private createOpenAIStreamAdapter;
     convertToolsToFunctions(tools: IAgentTool[]): ChatCompletionTool[];
 }
 
package/dist/index.js
CHANGED
@@ -79,6 +79,50 @@ var AzureOpenAI = class extends BaseModel {
     }
     return await this.fetch(messages);
   }
+  async fetchStreamWithContextMessage(messages, functions) {
+    const stream = await this.client.chat.completions.create({
+      model: this.modelName,
+      messages,
+      tools: functions,
+      tool_choice: "auto",
+      stream: true
+    });
+    return await this.createOpenAIStreamAdapter(stream);
+  }
+  // NOTE(yoojin): Need to switch API Stream type to LLMStream.
+  createOpenAIStreamAdapter(openaiStream) {
+    return {
+      async *[Symbol.asyncIterator]() {
+        for await (const openaiChunk of openaiStream) {
+          const choice = openaiChunk.choices[0];
+          if (choice) {
+            const streamChunk = {
+              delta: {
+                role: choice.delta.role,
+                content: choice.delta.content || void 0,
+                tool_calls: choice.delta.tool_calls?.map(
+                  (tc) => ({
+                    index: tc.index,
+                    id: tc.id,
+                    type: tc.type,
+                    function: tc.function
+                  })
+                )
+              },
+              finish_reason: choice.finish_reason,
+              metadata: {
+                provider: "openai",
+                model: openaiChunk.model,
+                id: openaiChunk.id
+              }
+            };
+            yield streamChunk;
+          }
+        }
+      },
+      metadata: { provider: "openai" }
+    };
+  }
   convertToolsToFunctions(tools) {
     const functions = [];
     for (const tool of tools) {
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../index.ts"],"sourcesContent":["import { BaseModel } from \"@ainetwork/adk/modules\";\nimport { ChatRole, type SessionObject } from \"@ainetwork/adk/types/memory\";\nimport type {\n\tFetchResponse,\n\tIA2ATool,\n\tIAgentTool,\n\tIMCPTool,\n\tToolCall,\n} from \"@ainetwork/adk/types/tool\";\nimport { TOOL_PROTOCOL_TYPE } from \"@ainetwork/adk/types/tool\";\nimport { AzureOpenAI as AzureOpenAIClient } from \"openai\";\nimport type {\n\tChatCompletionMessageParam as CCMessageParam,\n\tChatCompletionMessageToolCall,\n\tChatCompletionTool,\n} from \"openai/resources\";\n\nexport class AzureOpenAI extends BaseModel<CCMessageParam, ChatCompletionTool> {\n\tprivate client: AzureOpenAIClient;\n\tprivate modelName: string;\n\n\tconstructor(\n\t\tbaseUrl: string,\n\t\tapiKey: string,\n\t\tapiVersion: string,\n\t\tmodelName: string,\n\t) {\n\t\tsuper();\n\t\tthis.client = new AzureOpenAIClient({\n\t\t\tbaseURL: baseUrl,\n\t\t\tapiKey: apiKey,\n\t\t\tapiVersion: apiVersion,\n\t\t});\n\t\tthis.modelName = modelName;\n\t}\n\n\tprivate getMessageRole(role: ChatRole) {\n\t\tswitch (role) {\n\t\t\tcase ChatRole.USER:\n\t\t\t\treturn \"user\";\n\t\t\tcase ChatRole.MODEL:\n\t\t\tcase ChatRole.SYSTEM:\n\t\t\t\treturn \"system\";\n\t\t\tdefault:\n\t\t\t\treturn \"system\"; /*FIXME*/\n\t\t}\n\t}\n\n\tgenerateMessages(params: {\n\t\tquery: string;\n\t\tsessionHistory?: SessionObject;\n\t\tsystemPrompt?: string;\n\t}): CCMessageParam[] {\n\t\tconst { query, sessionHistory, systemPrompt } = params;\n\t\tconst messages: CCMessageParam[] = !systemPrompt\n\t\t\t? []\n\t\t\t: [{ role: \"system\", content: systemPrompt.trim() }];\n\t\tconst sessionContent: CCMessageParam[] = !sessionHistory\n\t\t\t? []\n\t\t\t: Object.keys(sessionHistory.chats).map((chatId: string) => {\n\t\t\t\t\tconst chat = sessionHistory.chats[chatId];\n\t\t\t\t\treturn {\n\t\t\t\t\t\trole: this.getMessageRole(chat.role),\n\t\t\t\t\t\tcontent: chat.content.parts[0],\n\t\t\t\t\t};\n\t\t\t\t});\n\t\tconst userContent: CCMessageParam = { role: \"user\", content: query };\n\t\treturn messages.concat(sessionContent).concat(userContent);\n\t}\n\n\tappendMessages(messages: CCMessageParam[], message: string): void {\n\t\tmessages.push({\n\t\t\trole: \"user\",\n\t\t\tcontent: message,\n\t\t});\n\t}\n\n\tasync fetch(messages: CCMessageParam[]): Promise<FetchResponse> {\n\t\tconst response = await this.client.chat.completions.create({\n\t\t\tmodel: this.modelName,\n\t\t\tmessages,\n\t\t});\n\n\t\treturn {\n\t\t\tcontent: response.choices[0].message.content || undefined,\n\t\t};\n\t}\n\n\tasync fetchWithContextMessage(\n\t\tmessages: CCMessageParam[],\n\t\tfunctions: ChatCompletionTool[],\n\t): Promise<FetchResponse> {\n\t\tif (functions.length > 0) {\n\t\t\tconst response = await this.client.chat.completions.create({\n\t\t\t\tmodel: this.modelName,\n\t\t\t\tmessages,\n\t\t\t\ttools: functions,\n\t\t\t\ttool_choice: \"auto\",\n\t\t\t});\n\n\t\t\tconst { content, tool_calls } = response.choices[0].message;\n\n\t\t\tconst toolCalls: ToolCall[] | undefined = tool_calls?.map(\n\t\t\t\t(value: ChatCompletionMessageToolCall) => {\n\t\t\t\t\treturn {\n\t\t\t\t\t\tname: value.function.name,\n\t\t\t\t\t\t// FIXME: value.function.arguments could not be a valid JSON\n\t\t\t\t\t\targuments: JSON.parse(value.function.arguments),\n\t\t\t\t\t};\n\t\t\t\t},\n\t\t\t);\n\n\t\t\treturn {\n\t\t\t\tcontent: content || undefined,\n\t\t\t\ttoolCalls,\n\t\t\t};\n\t\t}\n\t\treturn await this.fetch(messages);\n\t}\n\n\tconvertToolsToFunctions(tools: 
IAgentTool[]): ChatCompletionTool[] {\n\t\tconst functions: ChatCompletionTool[] = [];\n\t\tfor (const tool of tools) {\n\t\t\tif (!tool.enabled) {\n\t\t\t\tcontinue;\n\t\t\t}\n\t\t\tif (tool.protocol === TOOL_PROTOCOL_TYPE.MCP) {\n\t\t\t\tconst { mcpTool, id } = tool as IMCPTool;\n\t\t\t\tfunctions.push({\n\t\t\t\t\ttype: \"function\",\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: id,\n\t\t\t\t\t\tdescription: mcpTool.description,\n\t\t\t\t\t\tparameters: mcpTool.inputSchema,\n\t\t\t\t\t},\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\t// PROTOCOL_TYPE.A2A\n\t\t\t\tconst { id, card } = tool as IA2ATool;\n\t\t\t\tfunctions.push({\n\t\t\t\t\ttype: \"function\",\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: id,\n\t\t\t\t\t\tdescription: card.description,\n\t\t\t\t\t},\n\t\t\t\t});\n\t\t\t}\n\t\t}\n\t\treturn functions;\n\t}\n}\n"],"mappings":";AAAA,SAAS,iBAAiB;AAC1B,SAAS,gBAAoC;
+
{"version":3,"sources":["../index.ts"],"sourcesContent":["import { BaseModel } from \"@ainetwork/adk/modules\";\nimport { ChatRole, type SessionObject } from \"@ainetwork/adk/types/memory\";\nimport type {\n\tLLMStream,\n\tStreamChunk,\n\tToolCallDelta,\n} from \"@ainetwork/adk/types/stream\";\nimport type {\n\tFetchResponse,\n\tIA2ATool,\n\tIAgentTool,\n\tIMCPTool,\n\tToolCall,\n} from \"@ainetwork/adk/types/tool\";\nimport { TOOL_PROTOCOL_TYPE } from \"@ainetwork/adk/types/tool\";\nimport { AzureOpenAI as AzureOpenAIClient } from \"openai\";\nimport type {\n\tChatCompletionMessageParam as CCMessageParam,\n\tChatCompletionChunk,\n\tChatCompletionMessageToolCall,\n\tChatCompletionTool,\n} from \"openai/resources\";\n\nexport class AzureOpenAI extends BaseModel<CCMessageParam, ChatCompletionTool> {\n\tprivate client: AzureOpenAIClient;\n\tprivate modelName: string;\n\n\tconstructor(\n\t\tbaseUrl: string,\n\t\tapiKey: string,\n\t\tapiVersion: string,\n\t\tmodelName: string,\n\t) {\n\t\tsuper();\n\t\tthis.client = new AzureOpenAIClient({\n\t\t\tbaseURL: baseUrl,\n\t\t\tapiKey: apiKey,\n\t\t\tapiVersion: apiVersion,\n\t\t});\n\t\tthis.modelName = modelName;\n\t}\n\n\tprivate getMessageRole(role: ChatRole) {\n\t\tswitch (role) {\n\t\t\tcase ChatRole.USER:\n\t\t\t\treturn \"user\";\n\t\t\tcase ChatRole.MODEL:\n\t\t\tcase ChatRole.SYSTEM:\n\t\t\t\treturn \"system\";\n\t\t\tdefault:\n\t\t\t\treturn \"system\"; /*FIXME*/\n\t\t}\n\t}\n\n\tgenerateMessages(params: {\n\t\tquery: string;\n\t\tsessionHistory?: SessionObject;\n\t\tsystemPrompt?: string;\n\t}): CCMessageParam[] {\n\t\tconst { query, sessionHistory, systemPrompt } = params;\n\t\tconst messages: CCMessageParam[] = !systemPrompt\n\t\t\t? []\n\t\t\t: [{ role: \"system\", content: systemPrompt.trim() }];\n\t\tconst sessionContent: CCMessageParam[] = !sessionHistory\n\t\t\t? 
[]\n\t\t\t: Object.keys(sessionHistory.chats).map((chatId: string) => {\n\t\t\t\t\tconst chat = sessionHistory.chats[chatId];\n\t\t\t\t\treturn {\n\t\t\t\t\t\trole: this.getMessageRole(chat.role),\n\t\t\t\t\t\tcontent: chat.content.parts[0],\n\t\t\t\t\t};\n\t\t\t\t});\n\t\tconst userContent: CCMessageParam = { role: \"user\", content: query };\n\t\treturn messages.concat(sessionContent).concat(userContent);\n\t}\n\n\tappendMessages(messages: CCMessageParam[], message: string): void {\n\t\tmessages.push({\n\t\t\trole: \"user\",\n\t\t\tcontent: message,\n\t\t});\n\t}\n\n\tasync fetch(messages: CCMessageParam[]): Promise<FetchResponse> {\n\t\tconst response = await this.client.chat.completions.create({\n\t\t\tmodel: this.modelName,\n\t\t\tmessages,\n\t\t});\n\n\t\treturn {\n\t\t\tcontent: response.choices[0].message.content || undefined,\n\t\t};\n\t}\n\n\tasync fetchWithContextMessage(\n\t\tmessages: CCMessageParam[],\n\t\tfunctions: ChatCompletionTool[],\n\t): Promise<FetchResponse> {\n\t\tif (functions.length > 0) {\n\t\t\tconst response = await this.client.chat.completions.create({\n\t\t\t\tmodel: this.modelName,\n\t\t\t\tmessages,\n\t\t\t\ttools: functions,\n\t\t\t\ttool_choice: \"auto\",\n\t\t\t});\n\n\t\t\tconst { content, tool_calls } = response.choices[0].message;\n\n\t\t\tconst toolCalls: ToolCall[] | undefined = tool_calls?.map(\n\t\t\t\t(value: ChatCompletionMessageToolCall) => {\n\t\t\t\t\treturn {\n\t\t\t\t\t\tname: value.function.name,\n\t\t\t\t\t\t// FIXME: value.function.arguments could not be a valid JSON\n\t\t\t\t\t\targuments: JSON.parse(value.function.arguments),\n\t\t\t\t\t};\n\t\t\t\t},\n\t\t\t);\n\n\t\t\treturn {\n\t\t\t\tcontent: content || undefined,\n\t\t\t\ttoolCalls,\n\t\t\t};\n\t\t}\n\t\treturn await this.fetch(messages);\n\t}\n\n\tasync fetchStreamWithContextMessage(\n\t\tmessages: CCMessageParam[],\n\t\tfunctions: ChatCompletionTool[],\n\t) {\n\t\tconst stream = await this.client.chat.completions.create({\n\t\t\tmodel: this.modelName,\n\t\t\tmessages,\n\t\t\ttools: functions,\n\t\t\ttool_choice: \"auto\",\n\t\t\tstream: true,\n\t\t});\n\t\treturn await this.createOpenAIStreamAdapter(stream);\n\t}\n\n\t// NOTE(yoojin): Need to switch API Stream type to LLMStream.\n\tprivate createOpenAIStreamAdapter(\n\t\topenaiStream: AsyncIterable<ChatCompletionChunk>,\n\t): LLMStream {\n\t\treturn {\n\t\t\tasync *[Symbol.asyncIterator](): AsyncIterator<StreamChunk> {\n\t\t\t\tfor await (const openaiChunk of openaiStream) {\n\t\t\t\t\tconst choice = openaiChunk.choices[0];\n\t\t\t\t\tif (choice) {\n\t\t\t\t\t\tconst streamChunk: StreamChunk = {\n\t\t\t\t\t\t\tdelta: {\n\t\t\t\t\t\t\t\trole: choice.delta.role,\n\t\t\t\t\t\t\t\tcontent: choice.delta.content || undefined,\n\t\t\t\t\t\t\t\ttool_calls: choice.delta.tool_calls?.map(\n\t\t\t\t\t\t\t\t\t(tc) =>\n\t\t\t\t\t\t\t\t\t\t({\n\t\t\t\t\t\t\t\t\t\t\tindex: tc.index,\n\t\t\t\t\t\t\t\t\t\t\tid: tc.id,\n\t\t\t\t\t\t\t\t\t\t\ttype: tc.type,\n\t\t\t\t\t\t\t\t\t\t\tfunction: tc.function,\n\t\t\t\t\t\t\t\t\t\t}) as ToolCallDelta,\n\t\t\t\t\t\t\t\t),\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tfinish_reason: choice.finish_reason as any,\n\t\t\t\t\t\t\tmetadata: {\n\t\t\t\t\t\t\t\tprovider: \"openai\",\n\t\t\t\t\t\t\t\tmodel: openaiChunk.model,\n\t\t\t\t\t\t\t\tid: openaiChunk.id,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t};\n\t\t\t\t\t\tyield streamChunk;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t},\n\t\t\tmetadata: { provider: \"openai\" },\n\t\t};\n\t}\n\n\tconvertToolsToFunctions(tools: IAgentTool[]): ChatCompletionTool[] {\n\t\tconst functions: 
ChatCompletionTool[] = [];\n\t\tfor (const tool of tools) {\n\t\t\tif (!tool.enabled) {\n\t\t\t\tcontinue;\n\t\t\t}\n\t\t\tif (tool.protocol === TOOL_PROTOCOL_TYPE.MCP) {\n\t\t\t\tconst { mcpTool, id } = tool as IMCPTool;\n\t\t\t\tfunctions.push({\n\t\t\t\t\ttype: \"function\",\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: id,\n\t\t\t\t\t\tdescription: mcpTool.description,\n\t\t\t\t\t\tparameters: mcpTool.inputSchema,\n\t\t\t\t\t},\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\t// PROTOCOL_TYPE.A2A\n\t\t\t\tconst { id, card } = tool as IA2ATool;\n\t\t\t\tfunctions.push({\n\t\t\t\t\ttype: \"function\",\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: id,\n\t\t\t\t\t\tdescription: card.description,\n\t\t\t\t\t},\n\t\t\t\t});\n\t\t\t}\n\t\t}\n\t\treturn functions;\n\t}\n}\n"],"mappings":";AAAA,SAAS,iBAAiB;AAC1B,SAAS,gBAAoC;AAa7C,SAAS,0BAA0B;AACnC,SAAS,eAAe,yBAAyB;AAQ1C,IAAM,cAAN,cAA0B,UAA8C;AAAA,EACtE;AAAA,EACA;AAAA,EAER,YACC,SACA,QACA,YACA,WACC;AACD,UAAM;AACN,SAAK,SAAS,IAAI,kBAAkB;AAAA,MACnC,SAAS;AAAA,MACT;AAAA,MACA;AAAA,IACD,CAAC;AACD,SAAK,YAAY;AAAA,EAClB;AAAA,EAEQ,eAAe,MAAgB;AACtC,YAAQ,MAAM;AAAA,MACb,KAAK,SAAS;AACb,eAAO;AAAA,MACR,KAAK,SAAS;AAAA,MACd,KAAK,SAAS;AACb,eAAO;AAAA,MACR;AACC,eAAO;AAAA,IACT;AAAA,EACD;AAAA,EAEA,iBAAiB,QAII;AACpB,UAAM,EAAE,OAAO,gBAAgB,aAAa,IAAI;AAChD,UAAM,WAA6B,CAAC,eACjC,CAAC,IACD,CAAC,EAAE,MAAM,UAAU,SAAS,aAAa,KAAK,EAAE,CAAC;AACpD,UAAM,iBAAmC,CAAC,iBACvC,CAAC,IACD,OAAO,KAAK,eAAe,KAAK,EAAE,IAAI,CAAC,WAAmB;AAC1D,YAAM,OAAO,eAAe,MAAM,MAAM;AACxC,aAAO;AAAA,QACN,MAAM,KAAK,eAAe,KAAK,IAAI;AAAA,QACnC,SAAS,KAAK,QAAQ,MAAM,CAAC;AAAA,MAC9B;AAAA,IACD,CAAC;AACH,UAAM,cAA8B,EAAE,MAAM,QAAQ,SAAS,MAAM;AACnE,WAAO,SAAS,OAAO,cAAc,EAAE,OAAO,WAAW;AAAA,EAC1D;AAAA,EAEA,eAAe,UAA4B,SAAuB;AACjE,aAAS,KAAK;AAAA,MACb,MAAM;AAAA,MACN,SAAS;AAAA,IACV,CAAC;AAAA,EACF;AAAA,EAEA,MAAM,MAAM,UAAoD;AAC/D,UAAM,WAAW,MAAM,KAAK,OAAO,KAAK,YAAY,OAAO;AAAA,MAC1D,OAAO,KAAK;AAAA,MACZ;AAAA,IACD,CAAC;AAED,WAAO;AAAA,MACN,SAAS,SAAS,QAAQ,CAAC,EAAE,QAAQ,WAAW;AAAA,IACjD;AAAA,EACD;AAAA,EAEA,MAAM,wBACL,UACA,WACyB;AACzB,QAAI,UAAU,SAAS,GAAG;AACzB,YAAM,WAAW,MAAM,KAAK,OAAO,KAAK,YAAY,OAAO;AAAA,QAC1D,OAAO,KAAK;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,aAAa;AAAA,MACd,CAAC;AAED,YAAM,EAAE,SAAS,WAAW,IAAI,SAAS,QAAQ,CAAC,EAAE;AAEpD,YAAM,YAAoC,YAAY;AAAA,QACrD,CAAC,UAAyC;AACzC,iBAAO;AAAA,YACN,MAAM,MAAM,SAAS;AAAA;AAAA,YAErB,WAAW,KAAK,MAAM,MAAM,SAAS,SAAS;AAAA,UAC/C;AAAA,QACD;AAAA,MACD;AAEA,aAAO;AAAA,QACN,SAAS,WAAW;AAAA,QACpB;AAAA,MACD;AAAA,IACD;AACA,WAAO,MAAM,KAAK,MAAM,QAAQ;AAAA,EACjC;AAAA,EAEA,MAAM,8BACL,UACA,WACC;AACD,UAAM,SAAS,MAAM,KAAK,OAAO,KAAK,YAAY,OAAO;AAAA,MACxD,OAAO,KAAK;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA,MACb,QAAQ;AAAA,IACT,CAAC;AACD,WAAO,MAAM,KAAK,0BAA0B,MAAM;AAAA,EACnD;AAAA;AAAA,EAGQ,0BACP,cACY;AACZ,WAAO;AAAA,MACN,QAAQ,OAAO,aAAa,IAAgC;AAC3D,yBAAiB,eAAe,cAAc;AAC7C,gBAAM,SAAS,YAAY,QAAQ,CAAC;AACpC,cAAI,QAAQ;AACX,kBAAM,cAA2B;AAAA,cAChC,OAAO;AAAA,gBACN,MAAM,OAAO,MAAM;AAAA,gBACnB,SAAS,OAAO,MAAM,WAAW;AAAA,gBACjC,YAAY,OAAO,MAAM,YAAY;AAAA,kBACpC,CAAC,QACC;AAAA,oBACA,OAAO,GAAG;AAAA,oBACV,IAAI,GAAG;AAAA,oBACP,MAAM,GAAG;AAAA,oBACT,UAAU,GAAG;AAAA,kBACd;AAAA,gBACF;AAAA,cACD;AAAA,cACA,eAAe,OAAO;AAAA,cACtB,UAAU;AAAA,gBACT,UAAU;AAAA,gBACV,OAAO,YAAY;AAAA,gBACnB,IAAI,YAAY;AAAA,cACjB;AAAA,YACD;AACA,kBAAM;AAAA,UACP;AAAA,QACD;AAAA,MACD;AAAA,MACA,UAAU,EAAE,UAAU,SAAS;AAAA,IAChC;AAAA,EACD;AAAA,EAEA,wBAAwB,OAA2C;AAClE,UAAM,YAAkC,CAAC;AACzC,eAAW,QAAQ,OAAO;AACzB,UAAI,CAAC,KAAK,SAAS;AAClB;AAAA,MACD;AACA,UAAI,KAAK,aAAa,mBAAmB,KAAK;AAC7C,cAAM,EAAE,SAAS,GAAG,IAAI;AACxB,kBAAU,KAAK;AAAA,UACd,MAAM;AAAA,UACN,UAAU;AAAA,YACT,MAAM;AAAA,YACN,aAAa,QAAQ;AAAA,YACrB,YAAY,QAAQ;AAAA,UACrB;AAAA,QA
CD,CAAC;AAAA,MACF,OAAO;AAEN,cAAM,EAAE,IAAI,KAAK,IAAI;AACrB,kBAAU,KAAK;AAAA,UACd,MAAM;AAAA,UACN,UAAU;AAAA,YACT,MAAM;AAAA,YACN,aAAa,KAAK;AAAA,UACnB;AAAA,QACD,CAAC;AAAA,MACF;AAAA,IACD;AACA,WAAO;AAAA,EACR;AACD;","names":[]}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@ainetwork/adk-provider-model-azure",
-  "version": "0.1.3",
+  "version": "0.1.4",
   "author": "AI Network (https://ainetwork.ai)",
   "type": "module",
   "engines": {
@@ -31,5 +31,5 @@
   "publishConfig": {
     "access": "public"
   },
-  "gitHead": "
+  "gitHead": "179c9e22bac074444e9a1ddf656eb52d6fe8d34a"
 }