@ainetwork/adk-provider-model-azure 0.2.4 → 0.3.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -67,30 +67,35 @@ var AzureOpenAI = class extends import_modules.BaseModel {
       content: message
     });
   }
-  async fetch(messages) {
+  async fetch(messages, options) {
     const response = await this.client.chat.completions.create({
       model: this.modelName,
-      messages
+      messages,
+      reasoning_effort: options?.reasoning,
+      verbosity: options?.verbosity
     });
     return {
       content: response.choices[0].message.content || void 0
     };
   }
-  async fetchWithContextMessage(messages, functions) {
+  async fetchWithContextMessage(messages, functions, options) {
     if (functions.length > 0) {
       const response = await this.client.chat.completions.create({
         model: this.modelName,
         messages,
         tools: functions,
-        tool_choice: functions.length > 0 ? "auto" : "none"
+        tool_choice: functions.length > 0 ? "auto" : "none",
+        reasoning_effort: options?.reasoning,
+        verbosity: options?.verbosity
       });
       const { content, tool_calls } = response.choices[0].message;
       const toolCalls = tool_calls?.map(
         (value) => {
+          const v = value;
           return {
-            name: value.function.name,
+            name: v.function.name,
             // FIXME: value.function.arguments could not be a valid JSON
-            arguments: JSON.parse(value.function.arguments)
+            arguments: JSON.parse(v.function.arguments)
           };
         }
       );
@@ -101,13 +106,15 @@ var AzureOpenAI = class extends import_modules.BaseModel {
     }
     return await this.fetch(messages);
   }
-  async fetchStreamWithContextMessage(messages, functions) {
+  async fetchStreamWithContextMessage(messages, functions, options) {
     const stream = await this.client.chat.completions.create({
       model: this.modelName,
       messages,
       tools: functions,
       tool_choice: functions.length > 0 ? "auto" : "none",
-      stream: true
+      stream: true,
+      reasoning_effort: options?.reasoning,
+      verbosity: options?.verbosity
     });
     return this.createOpenAIStreamAdapter(stream);
   }
package/dist/index.cjs.map CHANGED
@@ -1 +1 @@
(single-line source map regenerated for the new build; its embedded sourcesContent mirrors the package/index.ts changes shown below)
package/dist/index.d.cts CHANGED
@@ -1,4 +1,4 @@
-import { BaseModel } from '@ainetwork/adk/modules';
+import { BaseModel, ModelFetchOptions } from '@ainetwork/adk/modules';
 import { ThreadObject } from '@ainetwork/adk/types/memory';
 import { LLMStream } from '@ainetwork/adk/types/stream';
 import { FetchResponse, ConnectorTool } from '@ainetwork/adk/types/connector';
@@ -15,9 +15,9 @@ declare class AzureOpenAI extends BaseModel<ChatCompletionMessageParam, ChatComp
         systemPrompt?: string;
     }): ChatCompletionMessageParam[];
     appendMessages(messages: ChatCompletionMessageParam[], message: string): void;
-    fetch(messages: ChatCompletionMessageParam[]): Promise<FetchResponse>;
-    fetchWithContextMessage(messages: ChatCompletionMessageParam[], functions: ChatCompletionTool[]): Promise<FetchResponse>;
-    fetchStreamWithContextMessage(messages: ChatCompletionMessageParam[], functions: ChatCompletionTool[]): Promise<LLMStream>;
+    fetch(messages: ChatCompletionMessageParam[], options?: ModelFetchOptions): Promise<FetchResponse>;
+    fetchWithContextMessage(messages: ChatCompletionMessageParam[], functions: ChatCompletionTool[], options?: ModelFetchOptions): Promise<FetchResponse>;
+    fetchStreamWithContextMessage(messages: ChatCompletionMessageParam[], functions: ChatCompletionTool[], options?: ModelFetchOptions): Promise<LLMStream>;
     private createOpenAIStreamAdapter;
     convertToolsToFunctions(tools: ConnectorTool[]): ChatCompletionTool[];
 }
package/dist/index.d.ts CHANGED
@@ -1,4 +1,4 @@
-import { BaseModel } from '@ainetwork/adk/modules';
+import { BaseModel, ModelFetchOptions } from '@ainetwork/adk/modules';
 import { ThreadObject } from '@ainetwork/adk/types/memory';
 import { LLMStream } from '@ainetwork/adk/types/stream';
 import { FetchResponse, ConnectorTool } from '@ainetwork/adk/types/connector';
@@ -15,9 +15,9 @@ declare class AzureOpenAI extends BaseModel<ChatCompletionMessageParam, ChatComp
         systemPrompt?: string;
     }): ChatCompletionMessageParam[];
     appendMessages(messages: ChatCompletionMessageParam[], message: string): void;
-    fetch(messages: ChatCompletionMessageParam[]): Promise<FetchResponse>;
-    fetchWithContextMessage(messages: ChatCompletionMessageParam[], functions: ChatCompletionTool[]): Promise<FetchResponse>;
-    fetchStreamWithContextMessage(messages: ChatCompletionMessageParam[], functions: ChatCompletionTool[]): Promise<LLMStream>;
+    fetch(messages: ChatCompletionMessageParam[], options?: ModelFetchOptions): Promise<FetchResponse>;
+    fetchWithContextMessage(messages: ChatCompletionMessageParam[], functions: ChatCompletionTool[], options?: ModelFetchOptions): Promise<FetchResponse>;
+    fetchStreamWithContextMessage(messages: ChatCompletionMessageParam[], functions: ChatCompletionTool[], options?: ModelFetchOptions): Promise<LLMStream>;
     private createOpenAIStreamAdapter;
     convertToolsToFunctions(tools: ConnectorTool[]): ChatCompletionTool[];
 }
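The updated declarations add an optional options?: ModelFetchOptions argument to fetch, fetchWithContextMessage, and fetchStreamWithContextMessage, which the implementation forwards to the chat completion request as reasoning_effort and verbosity. A minimal usage sketch follows; the endpoint, API key, API version, deployment name, and the "low"/"medium" option values are placeholders, and the exact value types of ModelFetchOptions are not shown in this diff.

```ts
import { AzureOpenAI } from "@ainetwork/adk-provider-model-azure";

// Placeholder Azure endpoint, key, API version, and deployment name.
const model = new AzureOpenAI(
	"https://my-resource.openai.azure.com/openai",
	"<azure-api-key>",
	"2024-10-21",
	"gpt-5-mini",
);

// Build the message array from a query and an optional system prompt.
const messages = model.generateMessages({
	query: "Summarize the release notes.",
	systemPrompt: "You are a concise assistant.",
});

// New in 0.3.x: the optional second argument is passed through to the
// request as reasoning_effort / verbosity. The specific string values
// here are assumptions about ModelFetchOptions, not taken from this diff.
const response = await model.fetch(messages, {
	reasoning: "low",
	verbosity: "medium",
});
console.log(response.content);
```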
package/dist/index.js CHANGED
@@ -43,30 +43,35 @@ var AzureOpenAI = class extends BaseModel {
       content: message
     });
   }
-  async fetch(messages) {
+  async fetch(messages, options) {
     const response = await this.client.chat.completions.create({
       model: this.modelName,
-      messages
+      messages,
+      reasoning_effort: options?.reasoning,
+      verbosity: options?.verbosity
    });
     return {
       content: response.choices[0].message.content || void 0
     };
   }
-  async fetchWithContextMessage(messages, functions) {
+  async fetchWithContextMessage(messages, functions, options) {
     if (functions.length > 0) {
       const response = await this.client.chat.completions.create({
         model: this.modelName,
         messages,
         tools: functions,
-        tool_choice: functions.length > 0 ? "auto" : "none"
+        tool_choice: functions.length > 0 ? "auto" : "none",
+        reasoning_effort: options?.reasoning,
+        verbosity: options?.verbosity
       });
       const { content, tool_calls } = response.choices[0].message;
       const toolCalls = tool_calls?.map(
         (value) => {
+          const v = value;
           return {
-            name: value.function.name,
+            name: v.function.name,
             // FIXME: value.function.arguments could not be a valid JSON
-            arguments: JSON.parse(value.function.arguments)
+            arguments: JSON.parse(v.function.arguments)
           };
         }
       );
@@ -77,13 +82,15 @@ var AzureOpenAI = class extends BaseModel {
     }
     return await this.fetch(messages);
   }
-  async fetchStreamWithContextMessage(messages, functions) {
+  async fetchStreamWithContextMessage(messages, functions, options) {
     const stream = await this.client.chat.completions.create({
       model: this.modelName,
       messages,
       tools: functions,
       tool_choice: functions.length > 0 ? "auto" : "none",
-      stream: true
+      stream: true,
+      reasoning_effort: options?.reasoning,
+      verbosity: options?.verbosity
     });
     return this.createOpenAIStreamAdapter(stream);
   }
package/dist/index.js.map CHANGED
@@ -1 +1 @@
(single-line source map regenerated for the new build; its embedded sourcesContent mirrors the package/index.ts changes shown below)
package/index.ts CHANGED
@@ -1,4 +1,4 @@
-import { BaseModel } from "@ainetwork/adk/modules";
+import { BaseModel, ModelFetchOptions } from "@ainetwork/adk/modules";
 import { MessageObject, MessageRole, type ThreadObject } from "@ainetwork/adk/types/memory";
 import type {
 	LLMStream,
@@ -14,6 +14,7 @@ import { AzureOpenAI as AzureOpenAIClient } from "openai";
 import type {
 	ChatCompletionMessageParam as CCMessageParam,
 	ChatCompletionChunk,
+	ChatCompletionMessageFunctionToolCall,
 	ChatCompletionMessageToolCall,
 	ChatCompletionTool,
 } from "openai/resources";
@@ -77,10 +78,15 @@ export class AzureOpenAI extends BaseModel<CCMessageParam, ChatCompletionTool> {
 		});
 	}
 
-	async fetch(messages: CCMessageParam[]): Promise<FetchResponse> {
+	async fetch(
+		messages: CCMessageParam[],
+		options?: ModelFetchOptions,
+	): Promise<FetchResponse> {
 		const response = await this.client.chat.completions.create({
 			model: this.modelName,
 			messages,
+			reasoning_effort: options?.reasoning,
+			verbosity: options?.verbosity,
 		});
 
 		return {
@@ -91,6 +97,7 @@ export class AzureOpenAI extends BaseModel<CCMessageParam, ChatCompletionTool> {
 	async fetchWithContextMessage(
 		messages: CCMessageParam[],
 		functions: ChatCompletionTool[],
+		options?: ModelFetchOptions,
 	): Promise<FetchResponse> {
 		if (functions.length > 0) {
 			const response = await this.client.chat.completions.create({
@@ -98,16 +105,19 @@ export class AzureOpenAI extends BaseModel<CCMessageParam, ChatCompletionTool> {
 				messages,
 				tools: functions,
 				tool_choice: functions.length > 0 ? "auto" : "none",
+				reasoning_effort: options?.reasoning,
+				verbosity: options?.verbosity,
 			});
 
 			const { content, tool_calls } = response.choices[0].message;
 
 			const toolCalls: ToolCall[] | undefined = tool_calls?.map(
 				(value: ChatCompletionMessageToolCall) => {
+					const v = value as ChatCompletionMessageFunctionToolCall;
 					return {
-						name: value.function.name,
+						name: v.function.name,
 						// FIXME: value.function.arguments could not be a valid JSON
-						arguments: JSON.parse(value.function.arguments),
+						arguments: JSON.parse(v.function.arguments),
 					};
 				},
 			);
@@ -123,6 +133,7 @@ export class AzureOpenAI extends BaseModel<CCMessageParam, ChatCompletionTool> {
 	async fetchStreamWithContextMessage(
 		messages: CCMessageParam[],
 		functions: ChatCompletionTool[],
+		options?: ModelFetchOptions,
 	): Promise<LLMStream> {
 		const stream = await this.client.chat.completions.create({
 			model: this.modelName,
@@ -130,6 +141,8 @@ export class AzureOpenAI extends BaseModel<CCMessageParam, ChatCompletionTool> {
 			tools: functions,
 			tool_choice: functions.length > 0 ? "auto" : "none",
 			stream: true,
+			reasoning_effort: options?.reasoning,
+			verbosity: options?.verbosity,
 		});
 		return this.createOpenAIStreamAdapter(stream);
 	}
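One reason for the new ChatCompletionMessageFunctionToolCall import: in the openai v6 SDK this release moves to, ChatCompletionMessageToolCall is a union of function and custom tool calls, so each entry is narrowed before reading function.name. Below is a hedged sketch of an alternative mapping that narrows on the type discriminant and also guards the JSON.parse flagged by the FIXME comment; the helper name is hypothetical and not part of the package.

```ts
import type { ChatCompletionMessageToolCall } from "openai/resources";

// Hypothetical helper: convert one SDK tool call into { name, arguments },
// skipping non-function (custom) tool calls and tolerating invalid JSON.
function toToolCall(value: ChatCompletionMessageToolCall) {
	if (value.type !== "function") return undefined; // custom tool calls carry no function payload
	let args: unknown;
	try {
		args = JSON.parse(value.function.arguments);
	} catch {
		// arguments may not be valid JSON (see the FIXME above); keep the raw string
		args = { raw: value.function.arguments };
	}
	return { name: value.function.name, arguments: args };
}
```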
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@ainetwork/adk-provider-model-azure",
-  "version": "0.2.4",
+  "version": "0.3.2",
   "author": "AI Network (https://ainetwork.ai)",
   "type": "module",
   "engines": {
@@ -21,8 +21,8 @@
     "clean": "rm -rf dist"
   },
   "dependencies": {
-    "@ainetwork/adk": "0.2.9",
-    "openai": "^5.10.2"
+    "@ainetwork/adk": "^0.3.2",
+    "openai": "^6.9.1"
   },
   "devDependencies": {
     "typescript": "^5.0.0"
@@ -31,5 +31,5 @@
   "publishConfig": {
     "access": "public"
   },
-  "gitHead": "f1315510a23dd44492dc20737c4be71188d01e26"
+  "gitHead": "4bfa5afae29304e6cb5f106c7327f5f2092f6601"
 }