@genkit-ai/compat-oai 1.29.0 → 1.30.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. package/lib/{audio-CJ8rzf35.d.mts → audio-Cr0WtG4I.d.mts} +88 -11
  2. package/lib/{audio-CJ8rzf35.d.ts → audio-Cr0WtG4I.d.ts} +88 -11
  3. package/lib/audio.d.mts +1 -1
  4. package/lib/audio.d.ts +1 -1
  5. package/lib/audio.js.map +1 -1
  6. package/lib/audio.mjs.map +1 -1
  7. package/lib/deepseek/deepseek.d.mts +1 -1
  8. package/lib/deepseek/deepseek.d.ts +1 -1
  9. package/lib/deepseek/index.d.mts +1 -1
  10. package/lib/deepseek/index.d.ts +1 -1
  11. package/lib/embedder.d.mts +1 -1
  12. package/lib/embedder.d.ts +1 -1
  13. package/lib/image.d.mts +1 -1
  14. package/lib/image.d.ts +1 -1
  15. package/lib/index.d.mts +1 -1
  16. package/lib/index.d.ts +1 -1
  17. package/lib/index.js +7 -0
  18. package/lib/index.js.map +1 -1
  19. package/lib/index.mjs +8 -0
  20. package/lib/index.mjs.map +1 -1
  21. package/lib/model.d.mts +1 -1
  22. package/lib/model.d.ts +1 -1
  23. package/lib/openai/dalle.d.mts +1 -1
  24. package/lib/openai/dalle.d.ts +1 -1
  25. package/lib/openai/index.d.mts +4 -2
  26. package/lib/openai/index.d.ts +4 -2
  27. package/lib/openai/index.js +35 -3
  28. package/lib/openai/index.js.map +1 -1
  29. package/lib/openai/index.mjs +39 -3
  30. package/lib/openai/index.mjs.map +1 -1
  31. package/lib/openai/stt.d.mts +0 -1
  32. package/lib/openai/stt.d.ts +0 -1
  33. package/lib/openai/stt.js +0 -3
  34. package/lib/openai/stt.js.map +1 -1
  35. package/lib/openai/stt.mjs +0 -3
  36. package/lib/openai/stt.mjs.map +1 -1
  37. package/lib/openai/whisper.d.mts +160 -0
  38. package/lib/openai/whisper.d.ts +160 -0
  39. package/lib/openai/whisper.js +112 -0
  40. package/lib/openai/whisper.js.map +1 -0
  41. package/lib/openai/whisper.mjs +91 -0
  42. package/lib/openai/whisper.mjs.map +1 -0
  43. package/lib/translate.d.mts +10 -0
  44. package/lib/translate.d.ts +10 -0
  45. package/lib/translate.js +172 -0
  46. package/lib/translate.js.map +1 -0
  47. package/lib/translate.mjs +143 -0
  48. package/lib/translate.mjs.map +1 -0
  49. package/lib/utils.d.mts +1 -1
  50. package/lib/utils.d.ts +1 -1
  51. package/lib/xai/grok.d.mts +1 -1
  52. package/lib/xai/grok.d.ts +1 -1
  53. package/lib/xai/index.d.mts +1 -1
  54. package/lib/xai/index.d.ts +1 -1
  55. package/package.json +2 -2
package/lib/index.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/index.ts"],"sourcesContent":["/**\n * Copyright 2024 The Fire Company\n * Copyright 2024 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { ActionMetadata } from 'genkit';\nimport { ResolvableAction, genkitPluginV2 } from 'genkit/plugin';\nimport { ActionType } from 'genkit/registry';\nimport OpenAI, { type ClientOptions } from 'openai';\nimport { compatOaiModelRef, defineCompatOpenAIModel } from './model.js';\nimport { toModelName } from './utils.js';\n\nexport {\n SpeechConfigSchema,\n TranscriptionConfigSchema,\n compatOaiSpeechModelRef,\n compatOaiTranscriptionModelRef,\n defineCompatOpenAISpeechModel,\n defineCompatOpenAITranscriptionModel,\n type SpeechRequestBuilder,\n type TranscriptionRequestBuilder,\n} from './audio.js';\nexport { defineCompatOpenAIEmbedder } from './embedder.js';\nexport {\n ImageGenerationCommonConfigSchema,\n compatOaiImageModelRef,\n defineCompatOpenAIImageModel,\n type ImageRequestBuilder,\n} from './image.js';\nexport {\n ChatCompletionCommonConfigSchema,\n compatOaiModelRef,\n defineCompatOpenAIModel,\n openAIModelRunner,\n type ModelRequestBuilder,\n} from './model.js';\n\nexport interface PluginOptions extends Partial<Omit<ClientOptions, 'apiKey'>> {\n apiKey?: ClientOptions['apiKey'] | false;\n name: string;\n initializer?: (client: OpenAI) => Promise<ResolvableAction[]>;\n resolver?: (\n client: OpenAI,\n actionType: ActionType,\n actionName: string\n ) => 
Promise<ResolvableAction | undefined> | ResolvableAction | undefined;\n listActions?: (client: OpenAI) => Promise<ActionMetadata[]>;\n}\n\n/**\n * This module provides the `openAICompatible` plugin factory for Genkit. It\n * enables interaction with OpenAI-compatible API endpoints, allowing users to\n * leverage various AI models by configuring API keys and other client options.\n *\n * The core export is `openAICompatible`, a function that accepts\n * `PluginOptions` and returns a Genkit plugin.\n *\n * Key `PluginOptions` include:\n * - `name`: A string to uniquely identify this plugin instance\n * (e.g., 'deepSeek', 'customOpenAI').\n * - `apiKey`: The API key for the service. If not provided directly, the\n * plugin will attempt to use the `OPENAI_API_KEY` environment variable.\n * - `initializer`: An optional asynchronous function for custom setup after\n * the OpenAI client is initialized. It receives the Genkit instance and the\n * OpenAI client.\n * - Additional properties from OpenAI's `ClientOptions` (like `baseURL`,\n * `timeout`, etc.) can be passed to customize the OpenAI client.\n *\n * The returned plugin initializes an OpenAI client tailored to the provided\n * options, making configured models available for use within Genkit flows.\n *\n * @param {PluginOptions} options - Configuration options for the plugin.\n * @returns A Genkit plugin configured for an OpenAI-compatible service.\n *\n * Usage: Import `openAICompatible` (or your chosen import name for the default\n * export) from this package (e.g., `genkitx-openai`). 
Then, invoke it within\n * the `plugins` array of `configureGenkit`, providing the necessary\n * `PluginOptions`.\n *\n * Example:\n * ```typescript\n * import myOpenAICompatiblePlugin from 'genkitx-openai'; // Default import\n *\n * export default configureGenkit({\n * plugins: [\n * myOpenAICompatiblePlugin({\n * name: 'gpt4o', // Name for this specific plugin configuration\n * apiKey: process.env.OPENAI_API_KEY,\n * // For a non-OpenAI compatible endpoint:\n * // baseURL: 'https://api.custom-llm-provider.com/v1',\n * }),\n * myOpenAICompatiblePlugin({\n * name: 'localLlama',\n * apiKey: 'ollama', // Or specific key if required by local server\n * baseURL: 'http://localhost:11434/v1', // Example for Ollama\n * }),\n * // ... other plugins\n * ],\n * });\n * ```\n */\nexport const openAICompatible = (options: PluginOptions) => {\n let listActionsCache;\n var client: OpenAI;\n function createClient() {\n if (client) return client;\n const { apiKey, ...restofOptions } = options;\n client = new OpenAI({\n ...restofOptions,\n apiKey: apiKey === false ? 'placeholder' : apiKey,\n });\n return client;\n }\n return genkitPluginV2({\n name: options.name,\n async init() {\n if (!options.initializer) {\n return [];\n }\n return await options.initializer(createClient());\n },\n async resolve(actionType: ActionType, actionName: string) {\n if (options.resolver) {\n return await options.resolver(createClient(), actionType, actionName);\n } else {\n if (actionType === 'model') {\n return defineCompatOpenAIModel({\n name: toModelName(actionName, options.name),\n client: createClient(),\n pluginOptions: options,\n modelRef: compatOaiModelRef({\n name: actionName,\n namespace: options.name,\n }),\n });\n }\n return undefined;\n }\n },\n list:\n // Don't attempt to list models if apiKey set to false\n options.listActions && options.apiKey !== false\n ? 
async () => {\n if (listActionsCache) return listActionsCache;\n listActionsCache = await options.listActions!(createClient());\n return listActionsCache;\n }\n : undefined,\n });\n};\n\nexport default openAICompatible;\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAiBA,oBAAiD;AAEjD,oBAA2C;AAC3C,mBAA2D;AAC3D,mBAA4B;AAE5B,mBASO;AACP,sBAA2C;AAC3C,mBAKO;AACP,IAAAA,gBAMO;AAkEA,MAAM,mBAAmB,CAAC,YAA2B;AAC1D,MAAI;AACJ,MAAI;AACJ,WAAS,eAAe;AACtB,QAAI,OAAQ,QAAO;AACnB,UAAM,EAAE,QAAQ,GAAG,cAAc,IAAI;AACrC,aAAS,IAAI,cAAAC,QAAO;AAAA,MAClB,GAAG;AAAA,MACH,QAAQ,WAAW,QAAQ,gBAAgB;AAAA,IAC7C,CAAC;AACD,WAAO;AAAA,EACT;AACA,aAAO,8BAAe;AAAA,IACpB,MAAM,QAAQ;AAAA,IACd,MAAM,OAAO;AACX,UAAI,CAAC,QAAQ,aAAa;AACxB,eAAO,CAAC;AAAA,MACV;AACA,aAAO,MAAM,QAAQ,YAAY,aAAa,CAAC;AAAA,IACjD;AAAA,IACA,MAAM,QAAQ,YAAwB,YAAoB;AACxD,UAAI,QAAQ,UAAU;AACpB,eAAO,MAAM,QAAQ,SAAS,aAAa,GAAG,YAAY,UAAU;AAAA,MACtE,OAAO;AACL,YAAI,eAAe,SAAS;AAC1B,qBAAO,sCAAwB;AAAA,YAC7B,UAAM,0BAAY,YAAY,QAAQ,IAAI;AAAA,YAC1C,QAAQ,aAAa;AAAA,YACrB,eAAe;AAAA,YACf,cAAU,gCAAkB;AAAA,cAC1B,MAAM;AAAA,cACN,WAAW,QAAQ;AAAA,YACrB,CAAC;AAAA,UACH,CAAC;AAAA,QACH;AACA,eAAO;AAAA,MACT;AAAA,IACF;AAAA,IACA;AAAA;AAAA,MAEE,QAAQ,eAAe,QAAQ,WAAW,QACtC,YAAY;AACV,YAAI,iBAAkB,QAAO;AAC7B,2BAAmB,MAAM,QAAQ,YAAa,aAAa,CAAC;AAC5D,eAAO;AAAA,MACT,IACA;AAAA;AAAA,EACR,CAAC;AACH;AAEA,IAAO,gBAAQ;","names":["import_model","OpenAI"]}
1
+ {"version":3,"sources":["../src/index.ts"],"sourcesContent":["/**\n * Copyright 2024 The Fire Company\n * Copyright 2024 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { ActionMetadata } from 'genkit';\nimport { ResolvableAction, genkitPluginV2 } from 'genkit/plugin';\nimport { ActionType } from 'genkit/registry';\nimport OpenAI, { type ClientOptions } from 'openai';\nimport { compatOaiModelRef, defineCompatOpenAIModel } from './model.js';\nimport { toModelName } from './utils.js';\n\nexport {\n SpeechConfigSchema,\n TranscriptionConfigSchema,\n compatOaiSpeechModelRef,\n compatOaiTranscriptionModelRef,\n defineCompatOpenAISpeechModel,\n defineCompatOpenAITranscriptionModel,\n type SpeechRequestBuilder,\n type TranscriptionRequestBuilder,\n} from './audio.js';\nexport { defineCompatOpenAIEmbedder } from './embedder.js';\nexport {\n ImageGenerationCommonConfigSchema,\n compatOaiImageModelRef,\n defineCompatOpenAIImageModel,\n type ImageRequestBuilder,\n} from './image.js';\nexport {\n ChatCompletionCommonConfigSchema,\n compatOaiModelRef,\n defineCompatOpenAIModel,\n openAIModelRunner,\n type ModelRequestBuilder,\n} from './model.js';\nexport {\n TranslationConfigSchema,\n compatOaiTranslationModelRef,\n defineCompatOpenAITranslationModel,\n type TranslationRequestBuilder,\n} from './translate.js';\n\nexport interface PluginOptions extends Partial<Omit<ClientOptions, 'apiKey'>> {\n apiKey?: ClientOptions['apiKey'] | false;\n 
name: string;\n initializer?: (client: OpenAI) => Promise<ResolvableAction[]>;\n resolver?: (\n client: OpenAI,\n actionType: ActionType,\n actionName: string\n ) => Promise<ResolvableAction | undefined> | ResolvableAction | undefined;\n listActions?: (client: OpenAI) => Promise<ActionMetadata[]>;\n}\n\n/**\n * This module provides the `openAICompatible` plugin factory for Genkit. It\n * enables interaction with OpenAI-compatible API endpoints, allowing users to\n * leverage various AI models by configuring API keys and other client options.\n *\n * The core export is `openAICompatible`, a function that accepts\n * `PluginOptions` and returns a Genkit plugin.\n *\n * Key `PluginOptions` include:\n * - `name`: A string to uniquely identify this plugin instance\n * (e.g., 'deepSeek', 'customOpenAI').\n * - `apiKey`: The API key for the service. If not provided directly, the\n * plugin will attempt to use the `OPENAI_API_KEY` environment variable.\n * - `initializer`: An optional asynchronous function for custom setup after\n * the OpenAI client is initialized. It receives the Genkit instance and the\n * OpenAI client.\n * - Additional properties from OpenAI's `ClientOptions` (like `baseURL`,\n * `timeout`, etc.) can be passed to customize the OpenAI client.\n *\n * The returned plugin initializes an OpenAI client tailored to the provided\n * options, making configured models available for use within Genkit flows.\n *\n * @param {PluginOptions} options - Configuration options for the plugin.\n * @returns A Genkit plugin configured for an OpenAI-compatible service.\n *\n * Usage: Import `openAICompatible` (or your chosen import name for the default\n * export) from this package (e.g., `genkitx-openai`). 
Then, invoke it within\n * the `plugins` array of `configureGenkit`, providing the necessary\n * `PluginOptions`.\n *\n * Example:\n * ```typescript\n * import myOpenAICompatiblePlugin from 'genkitx-openai'; // Default import\n *\n * export default configureGenkit({\n * plugins: [\n * myOpenAICompatiblePlugin({\n * name: 'gpt4o', // Name for this specific plugin configuration\n * apiKey: process.env.OPENAI_API_KEY,\n * // For a non-OpenAI compatible endpoint:\n * // baseURL: 'https://api.custom-llm-provider.com/v1',\n * }),\n * myOpenAICompatiblePlugin({\n * name: 'localLlama',\n * apiKey: 'ollama', // Or specific key if required by local server\n * baseURL: 'http://localhost:11434/v1', // Example for Ollama\n * }),\n * // ... other plugins\n * ],\n * });\n * ```\n */\nexport const openAICompatible = (options: PluginOptions) => {\n let listActionsCache;\n var client: OpenAI;\n function createClient() {\n if (client) return client;\n const { apiKey, ...restofOptions } = options;\n client = new OpenAI({\n ...restofOptions,\n apiKey: apiKey === false ? 'placeholder' : apiKey,\n });\n return client;\n }\n return genkitPluginV2({\n name: options.name,\n async init() {\n if (!options.initializer) {\n return [];\n }\n return await options.initializer(createClient());\n },\n async resolve(actionType: ActionType, actionName: string) {\n if (options.resolver) {\n return await options.resolver(createClient(), actionType, actionName);\n } else {\n if (actionType === 'model') {\n return defineCompatOpenAIModel({\n name: toModelName(actionName, options.name),\n client: createClient(),\n pluginOptions: options,\n modelRef: compatOaiModelRef({\n name: actionName,\n namespace: options.name,\n }),\n });\n }\n return undefined;\n }\n },\n list:\n // Don't attempt to list models if apiKey set to false\n options.listActions && options.apiKey !== false\n ? 
async () => {\n if (listActionsCache) return listActionsCache;\n listActionsCache = await options.listActions!(createClient());\n return listActionsCache;\n }\n : undefined,\n });\n};\n\nexport default openAICompatible;\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAiBA,oBAAiD;AAEjD,oBAA2C;AAC3C,mBAA2D;AAC3D,mBAA4B;AAE5B,mBASO;AACP,sBAA2C;AAC3C,mBAKO;AACP,IAAAA,gBAMO;AACP,uBAKO;AAkEA,MAAM,mBAAmB,CAAC,YAA2B;AAC1D,MAAI;AACJ,MAAI;AACJ,WAAS,eAAe;AACtB,QAAI,OAAQ,QAAO;AACnB,UAAM,EAAE,QAAQ,GAAG,cAAc,IAAI;AACrC,aAAS,IAAI,cAAAC,QAAO;AAAA,MAClB,GAAG;AAAA,MACH,QAAQ,WAAW,QAAQ,gBAAgB;AAAA,IAC7C,CAAC;AACD,WAAO;AAAA,EACT;AACA,aAAO,8BAAe;AAAA,IACpB,MAAM,QAAQ;AAAA,IACd,MAAM,OAAO;AACX,UAAI,CAAC,QAAQ,aAAa;AACxB,eAAO,CAAC;AAAA,MACV;AACA,aAAO,MAAM,QAAQ,YAAY,aAAa,CAAC;AAAA,IACjD;AAAA,IACA,MAAM,QAAQ,YAAwB,YAAoB;AACxD,UAAI,QAAQ,UAAU;AACpB,eAAO,MAAM,QAAQ,SAAS,aAAa,GAAG,YAAY,UAAU;AAAA,MACtE,OAAO;AACL,YAAI,eAAe,SAAS;AAC1B,qBAAO,sCAAwB;AAAA,YAC7B,UAAM,0BAAY,YAAY,QAAQ,IAAI;AAAA,YAC1C,QAAQ,aAAa;AAAA,YACrB,eAAe;AAAA,YACf,cAAU,gCAAkB;AAAA,cAC1B,MAAM;AAAA,cACN,WAAW,QAAQ;AAAA,YACrB,CAAC;AAAA,UACH,CAAC;AAAA,QACH;AACA,eAAO;AAAA,MACT;AAAA,IACF;AAAA,IACA;AAAA;AAAA,MAEE,QAAQ,eAAe,QAAQ,WAAW,QACtC,YAAY;AACV,YAAI,iBAAkB,QAAO;AAC7B,2BAAmB,MAAM,QAAQ,YAAa,aAAa,CAAC;AAC5D,eAAO;AAAA,MACT,IACA;AAAA;AAAA,EACR,CAAC;AACH;AAEA,IAAO,gBAAQ;","names":["import_model","OpenAI"]}
package/lib/index.mjs CHANGED
@@ -22,6 +22,11 @@ import {
22
22
  defineCompatOpenAIModel as defineCompatOpenAIModel2,
23
23
  openAIModelRunner
24
24
  } from "./model.js";
25
+ import {
26
+ TranslationConfigSchema,
27
+ compatOaiTranslationModelRef,
28
+ defineCompatOpenAITranslationModel
29
+ } from "./translate.js";
25
30
  const openAICompatible = (options) => {
26
31
  let listActionsCache;
27
32
  var client;
@@ -76,16 +81,19 @@ export {
76
81
  ImageGenerationCommonConfigSchema,
77
82
  SpeechConfigSchema,
78
83
  TranscriptionConfigSchema,
84
+ TranslationConfigSchema,
79
85
  compatOaiImageModelRef,
80
86
  compatOaiModelRef2 as compatOaiModelRef,
81
87
  compatOaiSpeechModelRef,
82
88
  compatOaiTranscriptionModelRef,
89
+ compatOaiTranslationModelRef,
83
90
  index_default as default,
84
91
  defineCompatOpenAIEmbedder,
85
92
  defineCompatOpenAIImageModel,
86
93
  defineCompatOpenAIModel2 as defineCompatOpenAIModel,
87
94
  defineCompatOpenAISpeechModel,
88
95
  defineCompatOpenAITranscriptionModel,
96
+ defineCompatOpenAITranslationModel,
89
97
  openAICompatible,
90
98
  openAIModelRunner
91
99
  };
package/lib/index.mjs.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/index.ts"],"sourcesContent":["/**\n * Copyright 2024 The Fire Company\n * Copyright 2024 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { ActionMetadata } from 'genkit';\nimport { ResolvableAction, genkitPluginV2 } from 'genkit/plugin';\nimport { ActionType } from 'genkit/registry';\nimport OpenAI, { type ClientOptions } from 'openai';\nimport { compatOaiModelRef, defineCompatOpenAIModel } from './model.js';\nimport { toModelName } from './utils.js';\n\nexport {\n SpeechConfigSchema,\n TranscriptionConfigSchema,\n compatOaiSpeechModelRef,\n compatOaiTranscriptionModelRef,\n defineCompatOpenAISpeechModel,\n defineCompatOpenAITranscriptionModel,\n type SpeechRequestBuilder,\n type TranscriptionRequestBuilder,\n} from './audio.js';\nexport { defineCompatOpenAIEmbedder } from './embedder.js';\nexport {\n ImageGenerationCommonConfigSchema,\n compatOaiImageModelRef,\n defineCompatOpenAIImageModel,\n type ImageRequestBuilder,\n} from './image.js';\nexport {\n ChatCompletionCommonConfigSchema,\n compatOaiModelRef,\n defineCompatOpenAIModel,\n openAIModelRunner,\n type ModelRequestBuilder,\n} from './model.js';\n\nexport interface PluginOptions extends Partial<Omit<ClientOptions, 'apiKey'>> {\n apiKey?: ClientOptions['apiKey'] | false;\n name: string;\n initializer?: (client: OpenAI) => Promise<ResolvableAction[]>;\n resolver?: (\n client: OpenAI,\n actionType: ActionType,\n actionName: string\n ) => 
Promise<ResolvableAction | undefined> | ResolvableAction | undefined;\n listActions?: (client: OpenAI) => Promise<ActionMetadata[]>;\n}\n\n/**\n * This module provides the `openAICompatible` plugin factory for Genkit. It\n * enables interaction with OpenAI-compatible API endpoints, allowing users to\n * leverage various AI models by configuring API keys and other client options.\n *\n * The core export is `openAICompatible`, a function that accepts\n * `PluginOptions` and returns a Genkit plugin.\n *\n * Key `PluginOptions` include:\n * - `name`: A string to uniquely identify this plugin instance\n * (e.g., 'deepSeek', 'customOpenAI').\n * - `apiKey`: The API key for the service. If not provided directly, the\n * plugin will attempt to use the `OPENAI_API_KEY` environment variable.\n * - `initializer`: An optional asynchronous function for custom setup after\n * the OpenAI client is initialized. It receives the Genkit instance and the\n * OpenAI client.\n * - Additional properties from OpenAI's `ClientOptions` (like `baseURL`,\n * `timeout`, etc.) can be passed to customize the OpenAI client.\n *\n * The returned plugin initializes an OpenAI client tailored to the provided\n * options, making configured models available for use within Genkit flows.\n *\n * @param {PluginOptions} options - Configuration options for the plugin.\n * @returns A Genkit plugin configured for an OpenAI-compatible service.\n *\n * Usage: Import `openAICompatible` (or your chosen import name for the default\n * export) from this package (e.g., `genkitx-openai`). 
Then, invoke it within\n * the `plugins` array of `configureGenkit`, providing the necessary\n * `PluginOptions`.\n *\n * Example:\n * ```typescript\n * import myOpenAICompatiblePlugin from 'genkitx-openai'; // Default import\n *\n * export default configureGenkit({\n * plugins: [\n * myOpenAICompatiblePlugin({\n * name: 'gpt4o', // Name for this specific plugin configuration\n * apiKey: process.env.OPENAI_API_KEY,\n * // For a non-OpenAI compatible endpoint:\n * // baseURL: 'https://api.custom-llm-provider.com/v1',\n * }),\n * myOpenAICompatiblePlugin({\n * name: 'localLlama',\n * apiKey: 'ollama', // Or specific key if required by local server\n * baseURL: 'http://localhost:11434/v1', // Example for Ollama\n * }),\n * // ... other plugins\n * ],\n * });\n * ```\n */\nexport const openAICompatible = (options: PluginOptions) => {\n let listActionsCache;\n var client: OpenAI;\n function createClient() {\n if (client) return client;\n const { apiKey, ...restofOptions } = options;\n client = new OpenAI({\n ...restofOptions,\n apiKey: apiKey === false ? 'placeholder' : apiKey,\n });\n return client;\n }\n return genkitPluginV2({\n name: options.name,\n async init() {\n if (!options.initializer) {\n return [];\n }\n return await options.initializer(createClient());\n },\n async resolve(actionType: ActionType, actionName: string) {\n if (options.resolver) {\n return await options.resolver(createClient(), actionType, actionName);\n } else {\n if (actionType === 'model') {\n return defineCompatOpenAIModel({\n name: toModelName(actionName, options.name),\n client: createClient(),\n pluginOptions: options,\n modelRef: compatOaiModelRef({\n name: actionName,\n namespace: options.name,\n }),\n });\n }\n return undefined;\n }\n },\n list:\n // Don't attempt to list models if apiKey set to false\n options.listActions && options.apiKey !== false\n ? 
async () => {\n if (listActionsCache) return listActionsCache;\n listActionsCache = await options.listActions!(createClient());\n return listActionsCache;\n }\n : undefined,\n });\n};\n\nexport default openAICompatible;\n"],"mappings":"AAiBA,SAA2B,sBAAsB;AAEjD,OAAO,YAAoC;AAC3C,SAAS,mBAAmB,+BAA+B;AAC3D,SAAS,mBAAmB;AAE5B;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OAGK;AACP,SAAS,kCAAkC;AAC3C;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OAEK;AACP;AAAA,EACE;AAAA,EACA,qBAAAA;AAAA,EACA,2BAAAC;AAAA,EACA;AAAA,OAEK;AAkEA,MAAM,mBAAmB,CAAC,YAA2B;AAC1D,MAAI;AACJ,MAAI;AACJ,WAAS,eAAe;AACtB,QAAI,OAAQ,QAAO;AACnB,UAAM,EAAE,QAAQ,GAAG,cAAc,IAAI;AACrC,aAAS,IAAI,OAAO;AAAA,MAClB,GAAG;AAAA,MACH,QAAQ,WAAW,QAAQ,gBAAgB;AAAA,IAC7C,CAAC;AACD,WAAO;AAAA,EACT;AACA,SAAO,eAAe;AAAA,IACpB,MAAM,QAAQ;AAAA,IACd,MAAM,OAAO;AACX,UAAI,CAAC,QAAQ,aAAa;AACxB,eAAO,CAAC;AAAA,MACV;AACA,aAAO,MAAM,QAAQ,YAAY,aAAa,CAAC;AAAA,IACjD;AAAA,IACA,MAAM,QAAQ,YAAwB,YAAoB;AACxD,UAAI,QAAQ,UAAU;AACpB,eAAO,MAAM,QAAQ,SAAS,aAAa,GAAG,YAAY,UAAU;AAAA,MACtE,OAAO;AACL,YAAI,eAAe,SAAS;AAC1B,iBAAO,wBAAwB;AAAA,YAC7B,MAAM,YAAY,YAAY,QAAQ,IAAI;AAAA,YAC1C,QAAQ,aAAa;AAAA,YACrB,eAAe;AAAA,YACf,UAAU,kBAAkB;AAAA,cAC1B,MAAM;AAAA,cACN,WAAW,QAAQ;AAAA,YACrB,CAAC;AAAA,UACH,CAAC;AAAA,QACH;AACA,eAAO;AAAA,MACT;AAAA,IACF;AAAA,IACA;AAAA;AAAA,MAEE,QAAQ,eAAe,QAAQ,WAAW,QACtC,YAAY;AACV,YAAI,iBAAkB,QAAO;AAC7B,2BAAmB,MAAM,QAAQ,YAAa,aAAa,CAAC;AAC5D,eAAO;AAAA,MACT,IACA;AAAA;AAAA,EACR,CAAC;AACH;AAEA,IAAO,gBAAQ;","names":["compatOaiModelRef","defineCompatOpenAIModel"]}
1
+ {"version":3,"sources":["../src/index.ts"],"sourcesContent":["/**\n * Copyright 2024 The Fire Company\n * Copyright 2024 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { ActionMetadata } from 'genkit';\nimport { ResolvableAction, genkitPluginV2 } from 'genkit/plugin';\nimport { ActionType } from 'genkit/registry';\nimport OpenAI, { type ClientOptions } from 'openai';\nimport { compatOaiModelRef, defineCompatOpenAIModel } from './model.js';\nimport { toModelName } from './utils.js';\n\nexport {\n SpeechConfigSchema,\n TranscriptionConfigSchema,\n compatOaiSpeechModelRef,\n compatOaiTranscriptionModelRef,\n defineCompatOpenAISpeechModel,\n defineCompatOpenAITranscriptionModel,\n type SpeechRequestBuilder,\n type TranscriptionRequestBuilder,\n} from './audio.js';\nexport { defineCompatOpenAIEmbedder } from './embedder.js';\nexport {\n ImageGenerationCommonConfigSchema,\n compatOaiImageModelRef,\n defineCompatOpenAIImageModel,\n type ImageRequestBuilder,\n} from './image.js';\nexport {\n ChatCompletionCommonConfigSchema,\n compatOaiModelRef,\n defineCompatOpenAIModel,\n openAIModelRunner,\n type ModelRequestBuilder,\n} from './model.js';\nexport {\n TranslationConfigSchema,\n compatOaiTranslationModelRef,\n defineCompatOpenAITranslationModel,\n type TranslationRequestBuilder,\n} from './translate.js';\n\nexport interface PluginOptions extends Partial<Omit<ClientOptions, 'apiKey'>> {\n apiKey?: ClientOptions['apiKey'] | false;\n 
name: string;\n initializer?: (client: OpenAI) => Promise<ResolvableAction[]>;\n resolver?: (\n client: OpenAI,\n actionType: ActionType,\n actionName: string\n ) => Promise<ResolvableAction | undefined> | ResolvableAction | undefined;\n listActions?: (client: OpenAI) => Promise<ActionMetadata[]>;\n}\n\n/**\n * This module provides the `openAICompatible` plugin factory for Genkit. It\n * enables interaction with OpenAI-compatible API endpoints, allowing users to\n * leverage various AI models by configuring API keys and other client options.\n *\n * The core export is `openAICompatible`, a function that accepts\n * `PluginOptions` and returns a Genkit plugin.\n *\n * Key `PluginOptions` include:\n * - `name`: A string to uniquely identify this plugin instance\n * (e.g., 'deepSeek', 'customOpenAI').\n * - `apiKey`: The API key for the service. If not provided directly, the\n * plugin will attempt to use the `OPENAI_API_KEY` environment variable.\n * - `initializer`: An optional asynchronous function for custom setup after\n * the OpenAI client is initialized. It receives the Genkit instance and the\n * OpenAI client.\n * - Additional properties from OpenAI's `ClientOptions` (like `baseURL`,\n * `timeout`, etc.) can be passed to customize the OpenAI client.\n *\n * The returned plugin initializes an OpenAI client tailored to the provided\n * options, making configured models available for use within Genkit flows.\n *\n * @param {PluginOptions} options - Configuration options for the plugin.\n * @returns A Genkit plugin configured for an OpenAI-compatible service.\n *\n * Usage: Import `openAICompatible` (or your chosen import name for the default\n * export) from this package (e.g., `genkitx-openai`). 
Then, invoke it within\n * the `plugins` array of `configureGenkit`, providing the necessary\n * `PluginOptions`.\n *\n * Example:\n * ```typescript\n * import myOpenAICompatiblePlugin from 'genkitx-openai'; // Default import\n *\n * export default configureGenkit({\n * plugins: [\n * myOpenAICompatiblePlugin({\n * name: 'gpt4o', // Name for this specific plugin configuration\n * apiKey: process.env.OPENAI_API_KEY,\n * // For a non-OpenAI compatible endpoint:\n * // baseURL: 'https://api.custom-llm-provider.com/v1',\n * }),\n * myOpenAICompatiblePlugin({\n * name: 'localLlama',\n * apiKey: 'ollama', // Or specific key if required by local server\n * baseURL: 'http://localhost:11434/v1', // Example for Ollama\n * }),\n * // ... other plugins\n * ],\n * });\n * ```\n */\nexport const openAICompatible = (options: PluginOptions) => {\n let listActionsCache;\n var client: OpenAI;\n function createClient() {\n if (client) return client;\n const { apiKey, ...restofOptions } = options;\n client = new OpenAI({\n ...restofOptions,\n apiKey: apiKey === false ? 'placeholder' : apiKey,\n });\n return client;\n }\n return genkitPluginV2({\n name: options.name,\n async init() {\n if (!options.initializer) {\n return [];\n }\n return await options.initializer(createClient());\n },\n async resolve(actionType: ActionType, actionName: string) {\n if (options.resolver) {\n return await options.resolver(createClient(), actionType, actionName);\n } else {\n if (actionType === 'model') {\n return defineCompatOpenAIModel({\n name: toModelName(actionName, options.name),\n client: createClient(),\n pluginOptions: options,\n modelRef: compatOaiModelRef({\n name: actionName,\n namespace: options.name,\n }),\n });\n }\n return undefined;\n }\n },\n list:\n // Don't attempt to list models if apiKey set to false\n options.listActions && options.apiKey !== false\n ? 
async () => {\n if (listActionsCache) return listActionsCache;\n listActionsCache = await options.listActions!(createClient());\n return listActionsCache;\n }\n : undefined,\n });\n};\n\nexport default openAICompatible;\n"],"mappings":"AAiBA,SAA2B,sBAAsB;AAEjD,OAAO,YAAoC;AAC3C,SAAS,mBAAmB,+BAA+B;AAC3D,SAAS,mBAAmB;AAE5B;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OAGK;AACP,SAAS,kCAAkC;AAC3C;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OAEK;AACP;AAAA,EACE;AAAA,EACA,qBAAAA;AAAA,EACA,2BAAAC;AAAA,EACA;AAAA,OAEK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OAEK;AAkEA,MAAM,mBAAmB,CAAC,YAA2B;AAC1D,MAAI;AACJ,MAAI;AACJ,WAAS,eAAe;AACtB,QAAI,OAAQ,QAAO;AACnB,UAAM,EAAE,QAAQ,GAAG,cAAc,IAAI;AACrC,aAAS,IAAI,OAAO;AAAA,MAClB,GAAG;AAAA,MACH,QAAQ,WAAW,QAAQ,gBAAgB;AAAA,IAC7C,CAAC;AACD,WAAO;AAAA,EACT;AACA,SAAO,eAAe;AAAA,IACpB,MAAM,QAAQ;AAAA,IACd,MAAM,OAAO;AACX,UAAI,CAAC,QAAQ,aAAa;AACxB,eAAO,CAAC;AAAA,MACV;AACA,aAAO,MAAM,QAAQ,YAAY,aAAa,CAAC;AAAA,IACjD;AAAA,IACA,MAAM,QAAQ,YAAwB,YAAoB;AACxD,UAAI,QAAQ,UAAU;AACpB,eAAO,MAAM,QAAQ,SAAS,aAAa,GAAG,YAAY,UAAU;AAAA,MACtE,OAAO;AACL,YAAI,eAAe,SAAS;AAC1B,iBAAO,wBAAwB;AAAA,YAC7B,MAAM,YAAY,YAAY,QAAQ,IAAI;AAAA,YAC1C,QAAQ,aAAa;AAAA,YACrB,eAAe;AAAA,YACf,UAAU,kBAAkB;AAAA,cAC1B,MAAM;AAAA,cACN,WAAW,QAAQ;AAAA,YACrB,CAAC;AAAA,UACH,CAAC;AAAA,QACH;AACA,eAAO;AAAA,MACT;AAAA,IACF;AAAA,IACA;AAAA;AAAA,MAEE,QAAQ,eAAe,QAAQ,WAAW,QACtC,YAAY;AACV,YAAI,iBAAkB,QAAO;AAC7B,2BAAmB,MAAM,QAAQ,YAAa,aAAa,CAAC;AAC5D,eAAO;AAAA,MACT,IACA;AAAA;AAAA,EACR,CAAC;AACH;AAEA,IAAO,gBAAQ;","names":["compatOaiModelRef","defineCompatOpenAIModel"]}
package/lib/model.d.mts CHANGED
@@ -2,7 +2,7 @@ import 'genkit';
2
2
  import 'genkit/model';
3
3
  import 'openai';
4
4
  import 'openai/resources/index.mjs';
5
- export { C as ChatCompletionCommonConfigSchema, M as ModelRequestBuilder, n as compatOaiModelRef, m as defineCompatOpenAIModel, j as fromOpenAIChoice, k as fromOpenAIChunkChoice, i as fromOpenAIToolCall, o as openAIModelRunner, h as toOpenAIMessages, l as toOpenAIRequestBody, t as toOpenAIRole, g as toOpenAITextAndMedia, f as toOpenAITool } from './audio-CJ8rzf35.mjs';
5
+ export { C as ChatCompletionCommonConfigSchema, M as ModelRequestBuilder, n as compatOaiModelRef, m as defineCompatOpenAIModel, j as fromOpenAIChoice, k as fromOpenAIChunkChoice, i as fromOpenAIToolCall, o as openAIModelRunner, h as toOpenAIMessages, l as toOpenAIRequestBody, t as toOpenAIRole, g as toOpenAITextAndMedia, f as toOpenAITool } from './audio-Cr0WtG4I.mjs';
6
6
  import 'openai/core.mjs';
7
7
  import 'openai/resources/audio/index.mjs';
8
8
  import 'genkit/plugin';
package/lib/model.d.ts CHANGED
@@ -2,7 +2,7 @@ import 'genkit';
2
2
  import 'genkit/model';
3
3
  import 'openai';
4
4
  import 'openai/resources/index.mjs';
5
- export { C as ChatCompletionCommonConfigSchema, M as ModelRequestBuilder, n as compatOaiModelRef, m as defineCompatOpenAIModel, j as fromOpenAIChoice, k as fromOpenAIChunkChoice, i as fromOpenAIToolCall, o as openAIModelRunner, h as toOpenAIMessages, l as toOpenAIRequestBody, t as toOpenAIRole, g as toOpenAITextAndMedia, f as toOpenAITool } from './audio-CJ8rzf35.js';
5
+ export { C as ChatCompletionCommonConfigSchema, M as ModelRequestBuilder, n as compatOaiModelRef, m as defineCompatOpenAIModel, j as fromOpenAIChoice, k as fromOpenAIChunkChoice, i as fromOpenAIToolCall, o as openAIModelRunner, h as toOpenAIMessages, l as toOpenAIRequestBody, t as toOpenAIRole, g as toOpenAITextAndMedia, f as toOpenAITool } from './audio-Cr0WtG4I.js';
6
6
  import 'openai/core.mjs';
7
7
  import 'openai/resources/audio/index.mjs';
8
8
  import 'genkit/plugin';
@@ -1,7 +1,7 @@
1
1
  import * as genkit from 'genkit';
2
2
  import { z } from 'genkit';
3
3
  import { ModelInfo } from 'genkit/model';
4
- import { a as ImageRequestBuilder } from '../audio-CJ8rzf35.mjs';
4
+ import { a as ImageRequestBuilder } from '../audio-Cr0WtG4I.mjs';
5
5
  import 'openai';
6
6
  import 'openai/core.mjs';
7
7
  import 'openai/resources/audio/index.mjs';
@@ -1,7 +1,7 @@
1
1
  import * as genkit from 'genkit';
2
2
  import { z } from 'genkit';
3
3
  import { ModelInfo } from 'genkit/model';
4
- import { a as ImageRequestBuilder } from '../audio-CJ8rzf35.js';
4
+ import { a as ImageRequestBuilder } from '../audio-Cr0WtG4I.js';
5
5
  import 'openai';
6
6
  import 'openai/core.mjs';
7
7
  import 'openai/resources/audio/index.mjs';
@@ -1,11 +1,12 @@
1
1
  import { z, ModelReference, EmbedderReference } from 'genkit';
2
2
  import { GenkitPluginV2 } from 'genkit/plugin';
3
- import { P as PluginOptions, I as ImageGenerationCommonConfigSchema, S as SpeechConfigSchema, T as TranscriptionConfigSchema } from '../audio-CJ8rzf35.mjs';
3
+ import { P as PluginOptions, I as ImageGenerationCommonConfigSchema, S as SpeechConfigSchema, T as TranscriptionConfigSchema } from '../audio-Cr0WtG4I.mjs';
4
4
  import { SUPPORTED_IMAGE_MODELS } from './dalle.mjs';
5
5
  import { SUPPORTED_EMBEDDING_MODELS, TextEmbeddingConfigSchema } from './embedder.mjs';
6
6
  import { SUPPORTED_GPT_MODELS, OpenAIChatCompletionConfigSchema } from './gpt.mjs';
7
7
  import { SUPPORTED_STT_MODELS } from './stt.mjs';
8
8
  import { SUPPORTED_TTS_MODELS } from './tts.mjs';
9
+ import { SUPPORTED_WHISPER_MODELS, WhisperConfigSchema } from './whisper.mjs';
9
10
  import 'genkit/model';
10
11
  import 'openai';
11
12
  import 'openai/core.mjs';
@@ -37,7 +38,8 @@ type OpenAIPlugin = {
37
38
  (params?: OpenAIPluginOptions): GenkitPluginV2;
38
39
  model(name: keyof typeof SUPPORTED_IMAGE_MODELS | (`dall-e${string}` & {}) | (`gpt-image-${string}` & {}), config?: z.infer<typeof ImageGenerationCommonConfigSchema>): ModelReference<typeof ImageGenerationCommonConfigSchema>;
39
40
  model(name: keyof typeof SUPPORTED_TTS_MODELS | (`tts-${string}` & {}) | (`${string}-tts` & {}), config?: z.infer<typeof SpeechConfigSchema>): ModelReference<typeof SpeechConfigSchema>;
40
- model(name: keyof typeof SUPPORTED_STT_MODELS | (`whisper-${string}` & {}) | (`${string}-transcribe` & {}), config?: z.infer<typeof TranscriptionConfigSchema>): ModelReference<typeof TranscriptionConfigSchema>;
41
+ model(name: keyof typeof SUPPORTED_WHISPER_MODELS | (`whisper-${string}` & {}), config?: z.infer<typeof WhisperConfigSchema>): ModelReference<typeof WhisperConfigSchema>;
42
+ model(name: keyof typeof SUPPORTED_STT_MODELS | (`${string}-transcribe` & {}), config?: z.infer<typeof TranscriptionConfigSchema>): ModelReference<typeof TranscriptionConfigSchema>;
41
43
  model(name: keyof typeof SUPPORTED_GPT_MODELS | (`gpt-${string}` & {}) | (`o${number}` & {}), config?: z.infer<typeof OpenAIChatCompletionConfigSchema>): ModelReference<typeof OpenAIChatCompletionConfigSchema>;
42
44
  model(name: string, config?: any): ModelReference<z.ZodTypeAny>;
43
45
  embedder(name: keyof typeof SUPPORTED_EMBEDDING_MODELS | (`${string}-embedding-${string}` & {}), config?: z.infer<typeof TextEmbeddingConfigSchema>): EmbedderReference<typeof TextEmbeddingConfigSchema>;
@@ -1,11 +1,12 @@
1
1
  import { z, ModelReference, EmbedderReference } from 'genkit';
2
2
  import { GenkitPluginV2 } from 'genkit/plugin';
3
- import { P as PluginOptions, I as ImageGenerationCommonConfigSchema, S as SpeechConfigSchema, T as TranscriptionConfigSchema } from '../audio-CJ8rzf35.js';
3
+ import { P as PluginOptions, I as ImageGenerationCommonConfigSchema, S as SpeechConfigSchema, T as TranscriptionConfigSchema } from '../audio-Cr0WtG4I.js';
4
4
  import { SUPPORTED_IMAGE_MODELS } from './dalle.js';
5
5
  import { SUPPORTED_EMBEDDING_MODELS, TextEmbeddingConfigSchema } from './embedder.js';
6
6
  import { SUPPORTED_GPT_MODELS, OpenAIChatCompletionConfigSchema } from './gpt.js';
7
7
  import { SUPPORTED_STT_MODELS } from './stt.js';
8
8
  import { SUPPORTED_TTS_MODELS } from './tts.js';
9
+ import { SUPPORTED_WHISPER_MODELS, WhisperConfigSchema } from './whisper.js';
9
10
  import 'genkit/model';
10
11
  import 'openai';
11
12
  import 'openai/core.mjs';
@@ -37,7 +38,8 @@ type OpenAIPlugin = {
37
38
  (params?: OpenAIPluginOptions): GenkitPluginV2;
38
39
  model(name: keyof typeof SUPPORTED_IMAGE_MODELS | (`dall-e${string}` & {}) | (`gpt-image-${string}` & {}), config?: z.infer<typeof ImageGenerationCommonConfigSchema>): ModelReference<typeof ImageGenerationCommonConfigSchema>;
39
40
  model(name: keyof typeof SUPPORTED_TTS_MODELS | (`tts-${string}` & {}) | (`${string}-tts` & {}), config?: z.infer<typeof SpeechConfigSchema>): ModelReference<typeof SpeechConfigSchema>;
40
- model(name: keyof typeof SUPPORTED_STT_MODELS | (`whisper-${string}` & {}) | (`${string}-transcribe` & {}), config?: z.infer<typeof TranscriptionConfigSchema>): ModelReference<typeof TranscriptionConfigSchema>;
41
+ model(name: keyof typeof SUPPORTED_WHISPER_MODELS | (`whisper-${string}` & {}), config?: z.infer<typeof WhisperConfigSchema>): ModelReference<typeof WhisperConfigSchema>;
42
+ model(name: keyof typeof SUPPORTED_STT_MODELS | (`${string}-transcribe` & {}), config?: z.infer<typeof TranscriptionConfigSchema>): ModelReference<typeof TranscriptionConfigSchema>;
41
43
  model(name: keyof typeof SUPPORTED_GPT_MODELS | (`gpt-${string}` & {}) | (`o${number}` & {}), config?: z.infer<typeof OpenAIChatCompletionConfigSchema>): ModelReference<typeof OpenAIChatCompletionConfigSchema>;
42
44
  model(name: string, config?: any): ModelReference<z.ZodTypeAny>;
43
45
  embedder(name: keyof typeof SUPPORTED_EMBEDDING_MODELS | (`${string}-embedding-${string}` & {}), config?: z.infer<typeof TextEmbeddingConfigSchema>): EmbedderReference<typeof TextEmbeddingConfigSchema>;
@@ -34,6 +34,7 @@ var import_embedder2 = require("./embedder.js");
34
34
  var import_gpt = require("./gpt.js");
35
35
  var import_stt = require("./stt.js");
36
36
  var import_tts = require("./tts.js");
37
+ var import_whisper = require("./whisper.js");
37
38
  const UNSUPPORTED_MODEL_MATCHERS = ["babbage", "davinci", "codex"];
38
39
  function createResolver(pluginOptions) {
39
40
  return async (client, actionType, actionName) => {
@@ -59,7 +60,15 @@ function createResolver(pluginOptions) {
59
60
  pluginOptions,
60
61
  modelRef
61
62
  });
62
- } else if (actionName.includes("whisper") || actionName.includes("transcribe")) {
63
+ } else if (actionName.includes("whisper")) {
64
+ const modelRef = (0, import_whisper.openAIWhisperModelRef)({ name: actionName });
65
+ return (0, import_whisper.defineOpenAIWhisperModel)({
66
+ name: modelRef.name,
67
+ client,
68
+ pluginOptions,
69
+ modelRef
70
+ });
71
+ } else if (actionName.includes("transcribe")) {
63
72
  const modelRef = (0, import_stt.openAITranscriptionModelRef)({
64
73
  name: actionName
65
74
  });
@@ -106,7 +115,14 @@ const listActions = async (client) => {
106
115
  info: modelRef.info,
107
116
  configSchema: modelRef.configSchema
108
117
  });
109
- } else if (model2.id.includes("whisper") || model2.id.includes("transcribe")) {
118
+ } else if (model2.id.includes("whisper")) {
119
+ const modelRef = import_whisper.SUPPORTED_WHISPER_MODELS[model2.id] ?? (0, import_whisper.openAIWhisperModelRef)({ name: model2.id });
120
+ return (0, import_genkit.modelActionMetadata)({
121
+ name: modelRef.name,
122
+ info: modelRef.info,
123
+ configSchema: modelRef.configSchema
124
+ });
125
+ } else if (model2.id.includes("transcribe")) {
110
126
  const modelRef = import_stt.SUPPORTED_STT_MODELS[model2.id] ?? (0, import_stt.openAITranscriptionModelRef)({ name: model2.id });
111
127
  return (0, import_genkit.modelActionMetadata)({
112
128
  name: modelRef.name,
@@ -161,6 +177,16 @@ function openAIPlugin(options) {
161
177
  })
162
178
  )
163
179
  );
180
+ models.push(
181
+ ...Object.values(import_whisper.SUPPORTED_WHISPER_MODELS).map(
182
+ (modelRef) => (0, import_whisper.defineOpenAIWhisperModel)({
183
+ name: modelRef.name,
184
+ client,
185
+ pluginOptions,
186
+ modelRef
187
+ })
188
+ )
189
+ );
164
190
  models.push(
165
191
  ...Object.values(import_stt.SUPPORTED_STT_MODELS).map(
166
192
  (modelRef) => (0, import_audio.defineCompatOpenAITranscriptionModel)({
@@ -201,7 +227,13 @@ const model = (name, config) => {
201
227
  config
202
228
  });
203
229
  }
204
- if (name.includes("whisper") || name.includes("transcribe")) {
230
+ if (name.includes("whisper")) {
231
+ return (0, import_whisper.openAIWhisperModelRef)({
232
+ name,
233
+ config
234
+ });
235
+ }
236
+ if (name.includes("transcribe")) {
205
237
  return (0, import_stt.openAITranscriptionModelRef)({
206
238
  name,
207
239
  config
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/openai/index.ts"],"sourcesContent":["/**\n * Copyright 2024 The Fire Company\n * Copyright 2024 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n ActionMetadata,\n embedderActionMetadata,\n embedderRef,\n EmbedderReference,\n modelActionMetadata,\n ModelReference,\n z,\n} from 'genkit';\nimport { ResolvableAction, type GenkitPluginV2 } from 'genkit/plugin';\nimport { ActionType } from 'genkit/registry';\nimport OpenAI from 'openai';\nimport {\n defineCompatOpenAISpeechModel,\n defineCompatOpenAITranscriptionModel,\n SpeechConfigSchema,\n TranscriptionConfigSchema,\n} from '../audio.js';\nimport { defineCompatOpenAIEmbedder } from '../embedder.js';\nimport {\n defineCompatOpenAIImageModel,\n ImageGenerationCommonConfigSchema,\n} from '../image.js';\nimport { openAICompatible, PluginOptions } from '../index.js';\nimport { defineCompatOpenAIModel } from '../model.js';\nimport {\n gptImage1RequestBuilder,\n openAIImageModelRef,\n SUPPORTED_IMAGE_MODELS,\n} from './dalle.js';\nimport {\n SUPPORTED_EMBEDDING_MODELS,\n TextEmbeddingConfigSchema,\n} from './embedder.js';\nimport {\n OpenAIChatCompletionConfigSchema,\n openAIModelRef,\n SUPPORTED_GPT_MODELS,\n} from './gpt.js';\nimport { openAITranscriptionModelRef, SUPPORTED_STT_MODELS } from './stt.js';\nimport { openAISpeechModelRef, SUPPORTED_TTS_MODELS } from './tts.js';\n\nexport type OpenAIPluginOptions = Omit<PluginOptions, 'name' | 
'baseURL'>;\n\nconst UNSUPPORTED_MODEL_MATCHERS = ['babbage', 'davinci', 'codex'];\n\nfunction createResolver(pluginOptions: PluginOptions) {\n return async (client: OpenAI, actionType: ActionType, actionName: string) => {\n if (actionType === 'embedder') {\n return defineCompatOpenAIEmbedder({\n name: actionName,\n client,\n pluginOptions,\n });\n } else if (\n actionName.includes('gpt-image-1') ||\n actionName.includes('dall-e')\n ) {\n const modelRef = openAIImageModelRef({ name: actionName });\n return defineCompatOpenAIImageModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n });\n } else if (actionName.includes('tts')) {\n const modelRef = openAISpeechModelRef({ name: actionName });\n return defineCompatOpenAISpeechModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n });\n } else if (\n actionName.includes('whisper') ||\n actionName.includes('transcribe')\n ) {\n const modelRef = openAITranscriptionModelRef({\n name: actionName,\n });\n return defineCompatOpenAITranscriptionModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n });\n } else {\n const modelRef = openAIModelRef({ name: actionName });\n return defineCompatOpenAIModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n });\n }\n };\n}\n\nfunction filterOpenAiModels(model: OpenAI.Model): boolean {\n return !UNSUPPORTED_MODEL_MATCHERS.some((m) => model.id.includes(m));\n}\n\nconst listActions = async (client: OpenAI): Promise<ActionMetadata[]> => {\n return await client.models.list().then((response) =>\n response.data.filter(filterOpenAiModels).map((model: OpenAI.Model) => {\n if (model.id.includes('embedding')) {\n return embedderActionMetadata({\n name: model.id,\n configSchema: TextEmbeddingConfigSchema,\n info: SUPPORTED_EMBEDDING_MODELS[model.id]?.info,\n });\n } else if (\n model.id.includes('gpt-image-1') ||\n model.id.includes('dall-e')\n ) {\n const modelRef =\n SUPPORTED_IMAGE_MODELS[model.id] ??\n openAIImageModelRef({ 
name: model.id });\n return modelActionMetadata({\n name: modelRef.name,\n info: modelRef.info,\n configSchema: modelRef.configSchema,\n });\n } else if (model.id.includes('tts')) {\n const modelRef =\n SUPPORTED_TTS_MODELS[model.id] ??\n openAISpeechModelRef({ name: model.id });\n return modelActionMetadata({\n name: modelRef.name,\n info: modelRef.info,\n configSchema: modelRef.configSchema,\n });\n } else if (\n model.id.includes('whisper') ||\n model.id.includes('transcribe')\n ) {\n const modelRef =\n SUPPORTED_STT_MODELS[model.id] ??\n openAITranscriptionModelRef({ name: model.id });\n return modelActionMetadata({\n name: modelRef.name,\n info: modelRef.info,\n configSchema: modelRef.configSchema,\n });\n } else {\n const modelRef =\n SUPPORTED_GPT_MODELS[model.id] ?? openAIModelRef({ name: model.id });\n return modelActionMetadata({\n name: modelRef.name,\n info: modelRef.info,\n configSchema: modelRef.configSchema,\n });\n }\n })\n );\n};\n\nexport function openAIPlugin(options?: OpenAIPluginOptions): GenkitPluginV2 {\n const pluginOptions = { name: 'openai', ...options };\n return openAICompatible({\n name: 'openai',\n ...options,\n initializer: async (client) => {\n const models = [] as ResolvableAction[];\n models.push(\n ...Object.values(SUPPORTED_GPT_MODELS).map((modelRef) =>\n defineCompatOpenAIModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n })\n )\n );\n models.push(\n ...Object.values(SUPPORTED_EMBEDDING_MODELS).map((embedderRef) =>\n defineCompatOpenAIEmbedder({\n name: embedderRef.name,\n client,\n pluginOptions,\n embedderRef,\n })\n )\n );\n models.push(\n ...Object.values(SUPPORTED_TTS_MODELS).map((modelRef) =>\n defineCompatOpenAISpeechModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n })\n )\n );\n models.push(\n ...Object.values(SUPPORTED_STT_MODELS).map((modelRef) =>\n defineCompatOpenAITranscriptionModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n })\n )\n );\n 
models.push(\n ...Object.values(SUPPORTED_IMAGE_MODELS).map((modelRef) =>\n defineCompatOpenAIImageModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n requestBuilder: modelRef.name.includes('gpt-image-1')\n ? gptImage1RequestBuilder\n : undefined,\n })\n )\n );\n return models;\n },\n resolver: createResolver(pluginOptions),\n listActions,\n });\n}\n\nexport type OpenAIPlugin = {\n (params?: OpenAIPluginOptions): GenkitPluginV2;\n model(\n name:\n | keyof typeof SUPPORTED_IMAGE_MODELS\n | (`dall-e${string}` & {})\n | (`gpt-image-${string}` & {}),\n config?: z.infer<typeof ImageGenerationCommonConfigSchema>\n ): ModelReference<typeof ImageGenerationCommonConfigSchema>;\n model(\n name:\n | keyof typeof SUPPORTED_TTS_MODELS\n | (`tts-${string}` & {})\n | (`${string}-tts` & {}),\n config?: z.infer<typeof SpeechConfigSchema>\n ): ModelReference<typeof SpeechConfigSchema>;\n model(\n name:\n | keyof typeof SUPPORTED_STT_MODELS\n | (`whisper-${string}` & {})\n | (`${string}-transcribe` & {}),\n config?: z.infer<typeof TranscriptionConfigSchema>\n ): ModelReference<typeof TranscriptionConfigSchema>;\n model(\n name:\n | keyof typeof SUPPORTED_GPT_MODELS\n | (`gpt-${string}` & {})\n | (`o${number}` & {}),\n config?: z.infer<typeof OpenAIChatCompletionConfigSchema>\n ): ModelReference<typeof OpenAIChatCompletionConfigSchema>;\n model(name: string, config?: any): ModelReference<z.ZodTypeAny>;\n embedder(\n name:\n | keyof typeof SUPPORTED_EMBEDDING_MODELS\n | (`${string}-embedding-${string}` & {}),\n config?: z.infer<typeof TextEmbeddingConfigSchema>\n ): EmbedderReference<typeof TextEmbeddingConfigSchema>;\n embedder(name: string, config?: any): EmbedderReference<z.ZodTypeAny>;\n};\n\nconst model = ((name: string, config?: any): ModelReference<z.ZodTypeAny> => {\n if (name.includes('gpt-image-1') || name.includes('dall-e')) {\n return openAIImageModelRef({\n name,\n config,\n });\n }\n if (name.includes('tts')) {\n return openAISpeechModelRef({\n name,\n 
config,\n });\n }\n if (name.includes('whisper') || name.includes('transcribe')) {\n return openAITranscriptionModelRef({\n name,\n config,\n });\n }\n return openAIModelRef({\n name,\n config,\n });\n}) as OpenAIPlugin['model'];\n\nconst embedder = ((\n name: string,\n config?: any\n): EmbedderReference<z.ZodTypeAny> => {\n return embedderRef({\n name,\n config,\n configSchema: TextEmbeddingConfigSchema,\n namespace: 'openai',\n });\n}) as OpenAIPlugin['embedder'];\n\n/**\n * This module provides an interface to the OpenAI models through the Genkit\n * plugin system. It allows users to interact with various models by providing\n * an API key and optional configuration.\n *\n * The main export is the `openai` plugin, which can be configured with an API\n * key either directly or through environment variables. It initializes the\n * OpenAI client and makes available the models for use.\n *\n * Exports:\n * - openai: The main plugin function to interact with OpenAI.\n *\n * Usage:\n * To use the models, initialize the openai plugin inside `configureGenkit` and\n * pass the configuration options. If no API key is provided in the options, the\n * environment variable `OPENAI_API_KEY` must be set.\n *\n * Example:\n * ```\n * import { openAI } from '@genkit-ai/compat-oai/openai';\n *\n * export default configureGenkit({\n * plugins: [\n * openai()\n * ... 
// other plugins\n * ]\n * });\n * ```\n */\nexport const openAI: OpenAIPlugin = Object.assign(openAIPlugin, {\n model,\n embedder,\n});\n\nexport default openAI;\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAiBA,oBAQO;AAIP,mBAKO;AACP,sBAA2C;AAC3C,mBAGO;AACP,eAAgD;AAChD,mBAAwC;AACxC,mBAIO;AACP,IAAAA,mBAGO;AACP,iBAIO;AACP,iBAAkE;AAClE,iBAA2D;AAI3D,MAAM,6BAA6B,CAAC,WAAW,WAAW,OAAO;AAEjE,SAAS,eAAe,eAA8B;AACpD,SAAO,OAAO,QAAgB,YAAwB,eAAuB;AAC3E,QAAI,eAAe,YAAY;AAC7B,iBAAO,4CAA2B;AAAA,QAChC,MAAM;AAAA,QACN;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,WACE,WAAW,SAAS,aAAa,KACjC,WAAW,SAAS,QAAQ,GAC5B;AACA,YAAM,eAAW,kCAAoB,EAAE,MAAM,WAAW,CAAC;AACzD,iBAAO,2CAA6B;AAAA,QAClC,MAAM,SAAS;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,WAAW,WAAW,SAAS,KAAK,GAAG;AACrC,YAAM,eAAW,iCAAqB,EAAE,MAAM,WAAW,CAAC;AAC1D,iBAAO,4CAA8B;AAAA,QACnC,MAAM,SAAS;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,WACE,WAAW,SAAS,SAAS,KAC7B,WAAW,SAAS,YAAY,GAChC;AACA,YAAM,eAAW,wCAA4B;AAAA,QAC3C,MAAM;AAAA,MACR,CAAC;AACD,iBAAO,mDAAqC;AAAA,QAC1C,MAAM,SAAS;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,OAAO;AACL,YAAM,eAAW,2BAAe,EAAE,MAAM,WAAW,CAAC;AACpD,iBAAO,sCAAwB;AAAA,QAC7B,MAAM,SAAS;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AACF;AAEA,SAAS,mBAAmBC,QAA8B;AACxD,SAAO,CAAC,2BAA2B,KAAK,CAAC,MAAMA,OAAM,GAAG,SAAS,CAAC,CAAC;AACrE;AAEA,MAAM,cAAc,OAAO,WAA8C;AACvE,SAAO,MAAM,OAAO,OAAO,KAAK,EAAE;AAAA,IAAK,CAAC,aACtC,SAAS,KAAK,OAAO,kBAAkB,EAAE,IAAI,CAACA,WAAwB;AACpE,UAAIA,OAAM,GAAG,SAAS,WAAW,GAAG;AAClC,mBAAO,sCAAuB;AAAA,UAC5B,MAAMA,OAAM;AAAA,UACZ,cAAc;AAAA,UACd,MAAM,4CAA2BA,OAAM,EAAE,GAAG;AAAA,QAC9C,CAAC;AAAA,MACH,WACEA,OAAM,GAAG,SAAS,aAAa,KAC/BA,OAAM,GAAG,SAAS,QAAQ,GAC1B;AACA,cAAM,WACJ,oCAAuBA,OAAM,EAAE,SAC/B,kCAAoB,EAAE,MAAMA,OAAM,GAAG,CAAC;AACxC,mBAAO,mCAAoB;AAAA,UACzB,MAAM,SAAS;AAAA,UACf,MAAM,SAAS;AAAA,UACf,cAAc,SAAS;AAAA,QACzB,CAAC;AAAA,MACH,WAAWA,OAAM,GAAG,SAAS,KAAK,GAAG;AACnC,cAAM,WACJ,gCAAqBA,OAAM,EAAE,SAC7B,iCAAqB,EAAE,MAAMA,OAAM,GAAG,CAAC;AACzC,mBAAO,mCAAoB;AA
AA,UACzB,MAAM,SAAS;AAAA,UACf,MAAM,SAAS;AAAA,UACf,cAAc,SAAS;AAAA,QACzB,CAAC;AAAA,MACH,WACEA,OAAM,GAAG,SAAS,SAAS,KAC3BA,OAAM,GAAG,SAAS,YAAY,GAC9B;AACA,cAAM,WACJ,gCAAqBA,OAAM,EAAE,SAC7B,wCAA4B,EAAE,MAAMA,OAAM,GAAG,CAAC;AAChD,mBAAO,mCAAoB;AAAA,UACzB,MAAM,SAAS;AAAA,UACf,MAAM,SAAS;AAAA,UACf,cAAc,SAAS;AAAA,QACzB,CAAC;AAAA,MACH,OAAO;AACL,cAAM,WACJ,gCAAqBA,OAAM,EAAE,SAAK,2BAAe,EAAE,MAAMA,OAAM,GAAG,CAAC;AACrE,mBAAO,mCAAoB;AAAA,UACzB,MAAM,SAAS;AAAA,UACf,MAAM,SAAS;AAAA,UACf,cAAc,SAAS;AAAA,QACzB,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAEO,SAAS,aAAa,SAA+C;AAC1E,QAAM,gBAAgB,EAAE,MAAM,UAAU,GAAG,QAAQ;AACnD,aAAO,2BAAiB;AAAA,IACtB,MAAM;AAAA,IACN,GAAG;AAAA,IACH,aAAa,OAAO,WAAW;AAC7B,YAAM,SAAS,CAAC;AAChB,aAAO;AAAA,QACL,GAAG,OAAO,OAAO,+BAAoB,EAAE;AAAA,UAAI,CAAC,iBAC1C,sCAAwB;AAAA,YACtB,MAAM,SAAS;AAAA,YACf;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AACA,aAAO;AAAA,QACL,GAAG,OAAO,OAAO,2CAA0B,EAAE;AAAA,UAAI,CAACC,qBAChD,4CAA2B;AAAA,YACzB,MAAMA,aAAY;AAAA,YAClB;AAAA,YACA;AAAA,YACA,aAAAA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AACA,aAAO;AAAA,QACL,GAAG,OAAO,OAAO,+BAAoB,EAAE;AAAA,UAAI,CAAC,iBAC1C,4CAA8B;AAAA,YAC5B,MAAM,SAAS;AAAA,YACf;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AACA,aAAO;AAAA,QACL,GAAG,OAAO,OAAO,+BAAoB,EAAE;AAAA,UAAI,CAAC,iBAC1C,mDAAqC;AAAA,YACnC,MAAM,SAAS;AAAA,YACf;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AACA,aAAO;AAAA,QACL,GAAG,OAAO,OAAO,mCAAsB,EAAE;AAAA,UAAI,CAAC,iBAC5C,2CAA6B;AAAA,YAC3B,MAAM,SAAS;AAAA,YACf;AAAA,YACA;AAAA,YACA;AAAA,YACA,gBAAgB,SAAS,KAAK,SAAS,aAAa,IAChD,uCACA;AAAA,UACN,CAAC;AAAA,QACH;AAAA,MACF;AACA,aAAO;AAAA,IACT;AAAA,IACA,UAAU,eAAe,aAAa;AAAA,IACtC;AAAA,EACF,CAAC;AACH;AA0CA,MAAM,QAAS,CAAC,MAAc,WAA+C;AAC3E,MAAI,KAAK,SAAS,aAAa,KAAK,KAAK,SAAS,QAAQ,GAAG;AAC3D,eAAO,kCAAoB;AAAA,MACzB;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AACA,MAAI,KAAK,SAAS,KAAK,GAAG;AACxB,eAAO,iCAAqB;AAAA,MAC1B;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AACA,MAAI,KAAK,SAAS,SAAS,KAAK,KAAK,SAAS,YAAY,GAAG;AAC3D,eAAO,wCAA4B;AAAA,MACjC;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AACA,aAA
O,2BAAe;AAAA,IACpB;AAAA,IACA;AAAA,EACF,CAAC;AACH;AAEA,MAAM,WAAY,CAChB,MACA,WACoC;AACpC,aAAO,2BAAY;AAAA,IACjB;AAAA,IACA;AAAA,IACA,cAAc;AAAA,IACd,WAAW;AAAA,EACb,CAAC;AACH;AA+BO,MAAM,SAAuB,OAAO,OAAO,cAAc;AAAA,EAC9D;AAAA,EACA;AACF,CAAC;AAED,IAAO,iBAAQ;","names":["import_embedder","model","embedderRef"]}
1
+ {"version":3,"sources":["../../src/openai/index.ts"],"sourcesContent":["/**\n * Copyright 2024 The Fire Company\n * Copyright 2024 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n ActionMetadata,\n embedderActionMetadata,\n embedderRef,\n EmbedderReference,\n modelActionMetadata,\n ModelReference,\n z,\n} from 'genkit';\nimport { ResolvableAction, type GenkitPluginV2 } from 'genkit/plugin';\nimport { ActionType } from 'genkit/registry';\nimport OpenAI from 'openai';\nimport {\n defineCompatOpenAISpeechModel,\n defineCompatOpenAITranscriptionModel,\n SpeechConfigSchema,\n TranscriptionConfigSchema,\n} from '../audio.js';\nimport { defineCompatOpenAIEmbedder } from '../embedder.js';\nimport {\n defineCompatOpenAIImageModel,\n ImageGenerationCommonConfigSchema,\n} from '../image.js';\nimport { openAICompatible, PluginOptions } from '../index.js';\nimport { defineCompatOpenAIModel } from '../model.js';\nimport {\n gptImage1RequestBuilder,\n openAIImageModelRef,\n SUPPORTED_IMAGE_MODELS,\n} from './dalle.js';\nimport {\n SUPPORTED_EMBEDDING_MODELS,\n TextEmbeddingConfigSchema,\n} from './embedder.js';\nimport {\n OpenAIChatCompletionConfigSchema,\n openAIModelRef,\n SUPPORTED_GPT_MODELS,\n} from './gpt.js';\nimport { openAITranscriptionModelRef, SUPPORTED_STT_MODELS } from './stt.js';\nimport { openAISpeechModelRef, SUPPORTED_TTS_MODELS } from './tts.js';\nimport {\n defineOpenAIWhisperModel,\n openAIWhisperModelRef,\n 
SUPPORTED_WHISPER_MODELS,\n WhisperConfigSchema,\n} from './whisper.js';\n\nexport type OpenAIPluginOptions = Omit<PluginOptions, 'name' | 'baseURL'>;\n\nconst UNSUPPORTED_MODEL_MATCHERS = ['babbage', 'davinci', 'codex'];\n\nfunction createResolver(pluginOptions: PluginOptions) {\n return async (client: OpenAI, actionType: ActionType, actionName: string) => {\n if (actionType === 'embedder') {\n return defineCompatOpenAIEmbedder({\n name: actionName,\n client,\n pluginOptions,\n });\n } else if (\n actionName.includes('gpt-image-1') ||\n actionName.includes('dall-e')\n ) {\n const modelRef = openAIImageModelRef({ name: actionName });\n return defineCompatOpenAIImageModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n });\n } else if (actionName.includes('tts')) {\n const modelRef = openAISpeechModelRef({ name: actionName });\n return defineCompatOpenAISpeechModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n });\n } else if (actionName.includes('whisper')) {\n const modelRef = openAIWhisperModelRef({ name: actionName });\n return defineOpenAIWhisperModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n });\n } else if (actionName.includes('transcribe')) {\n const modelRef = openAITranscriptionModelRef({\n name: actionName,\n });\n return defineCompatOpenAITranscriptionModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n });\n } else {\n const modelRef = openAIModelRef({ name: actionName });\n return defineCompatOpenAIModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n });\n }\n };\n}\n\nfunction filterOpenAiModels(model: OpenAI.Model): boolean {\n return !UNSUPPORTED_MODEL_MATCHERS.some((m) => model.id.includes(m));\n}\n\nconst listActions = async (client: OpenAI): Promise<ActionMetadata[]> => {\n return await client.models.list().then((response) =>\n response.data.filter(filterOpenAiModels).map((model: OpenAI.Model) => {\n if (model.id.includes('embedding')) {\n return 
embedderActionMetadata({\n name: model.id,\n configSchema: TextEmbeddingConfigSchema,\n info: SUPPORTED_EMBEDDING_MODELS[model.id]?.info,\n });\n } else if (\n model.id.includes('gpt-image-1') ||\n model.id.includes('dall-e')\n ) {\n const modelRef =\n SUPPORTED_IMAGE_MODELS[model.id] ??\n openAIImageModelRef({ name: model.id });\n return modelActionMetadata({\n name: modelRef.name,\n info: modelRef.info,\n configSchema: modelRef.configSchema,\n });\n } else if (model.id.includes('tts')) {\n const modelRef =\n SUPPORTED_TTS_MODELS[model.id] ??\n openAISpeechModelRef({ name: model.id });\n return modelActionMetadata({\n name: modelRef.name,\n info: modelRef.info,\n configSchema: modelRef.configSchema,\n });\n } else if (model.id.includes('whisper')) {\n const modelRef =\n SUPPORTED_WHISPER_MODELS[model.id] ??\n openAIWhisperModelRef({ name: model.id });\n return modelActionMetadata({\n name: modelRef.name,\n info: modelRef.info,\n configSchema: modelRef.configSchema,\n });\n } else if (model.id.includes('transcribe')) {\n const modelRef =\n SUPPORTED_STT_MODELS[model.id] ??\n openAITranscriptionModelRef({ name: model.id });\n return modelActionMetadata({\n name: modelRef.name,\n info: modelRef.info,\n configSchema: modelRef.configSchema,\n });\n } else {\n const modelRef =\n SUPPORTED_GPT_MODELS[model.id] ?? 
openAIModelRef({ name: model.id });\n return modelActionMetadata({\n name: modelRef.name,\n info: modelRef.info,\n configSchema: modelRef.configSchema,\n });\n }\n })\n );\n};\n\nexport function openAIPlugin(options?: OpenAIPluginOptions): GenkitPluginV2 {\n const pluginOptions = { name: 'openai', ...options };\n return openAICompatible({\n name: 'openai',\n ...options,\n initializer: async (client) => {\n const models = [] as ResolvableAction[];\n models.push(\n ...Object.values(SUPPORTED_GPT_MODELS).map((modelRef) =>\n defineCompatOpenAIModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n })\n )\n );\n models.push(\n ...Object.values(SUPPORTED_EMBEDDING_MODELS).map((embedderRef) =>\n defineCompatOpenAIEmbedder({\n name: embedderRef.name,\n client,\n pluginOptions,\n embedderRef,\n })\n )\n );\n models.push(\n ...Object.values(SUPPORTED_TTS_MODELS).map((modelRef) =>\n defineCompatOpenAISpeechModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n })\n )\n );\n models.push(\n ...Object.values(SUPPORTED_WHISPER_MODELS).map((modelRef) =>\n defineOpenAIWhisperModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n })\n )\n );\n models.push(\n ...Object.values(SUPPORTED_STT_MODELS).map((modelRef) =>\n defineCompatOpenAITranscriptionModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n })\n )\n );\n models.push(\n ...Object.values(SUPPORTED_IMAGE_MODELS).map((modelRef) =>\n defineCompatOpenAIImageModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n requestBuilder: modelRef.name.includes('gpt-image-1')\n ? 
gptImage1RequestBuilder\n : undefined,\n })\n )\n );\n return models;\n },\n resolver: createResolver(pluginOptions),\n listActions,\n });\n}\n\nexport type OpenAIPlugin = {\n (params?: OpenAIPluginOptions): GenkitPluginV2;\n model(\n name:\n | keyof typeof SUPPORTED_IMAGE_MODELS\n | (`dall-e${string}` & {})\n | (`gpt-image-${string}` & {}),\n config?: z.infer<typeof ImageGenerationCommonConfigSchema>\n ): ModelReference<typeof ImageGenerationCommonConfigSchema>;\n model(\n name:\n | keyof typeof SUPPORTED_TTS_MODELS\n | (`tts-${string}` & {})\n | (`${string}-tts` & {}),\n config?: z.infer<typeof SpeechConfigSchema>\n ): ModelReference<typeof SpeechConfigSchema>;\n model(\n name: keyof typeof SUPPORTED_WHISPER_MODELS | (`whisper-${string}` & {}),\n config?: z.infer<typeof WhisperConfigSchema>\n ): ModelReference<typeof WhisperConfigSchema>;\n model(\n name: keyof typeof SUPPORTED_STT_MODELS | (`${string}-transcribe` & {}),\n config?: z.infer<typeof TranscriptionConfigSchema>\n ): ModelReference<typeof TranscriptionConfigSchema>;\n model(\n name:\n | keyof typeof SUPPORTED_GPT_MODELS\n | (`gpt-${string}` & {})\n | (`o${number}` & {}),\n config?: z.infer<typeof OpenAIChatCompletionConfigSchema>\n ): ModelReference<typeof OpenAIChatCompletionConfigSchema>;\n model(name: string, config?: any): ModelReference<z.ZodTypeAny>;\n embedder(\n name:\n | keyof typeof SUPPORTED_EMBEDDING_MODELS\n | (`${string}-embedding-${string}` & {}),\n config?: z.infer<typeof TextEmbeddingConfigSchema>\n ): EmbedderReference<typeof TextEmbeddingConfigSchema>;\n embedder(name: string, config?: any): EmbedderReference<z.ZodTypeAny>;\n};\n\nconst model = ((name: string, config?: any): ModelReference<z.ZodTypeAny> => {\n if (name.includes('gpt-image-1') || name.includes('dall-e')) {\n return openAIImageModelRef({\n name,\n config,\n });\n }\n if (name.includes('tts')) {\n return openAISpeechModelRef({\n name,\n config,\n });\n }\n if (name.includes('whisper')) {\n return 
openAIWhisperModelRef({\n name,\n config,\n });\n }\n if (name.includes('transcribe')) {\n return openAITranscriptionModelRef({\n name,\n config,\n });\n }\n return openAIModelRef({\n name,\n config,\n });\n}) as OpenAIPlugin['model'];\n\nconst embedder = ((\n name: string,\n config?: any\n): EmbedderReference<z.ZodTypeAny> => {\n return embedderRef({\n name,\n config,\n configSchema: TextEmbeddingConfigSchema,\n namespace: 'openai',\n });\n}) as OpenAIPlugin['embedder'];\n\n/**\n * This module provides an interface to the OpenAI models through the Genkit\n * plugin system. It allows users to interact with various models by providing\n * an API key and optional configuration.\n *\n * The main export is the `openai` plugin, which can be configured with an API\n * key either directly or through environment variables. It initializes the\n * OpenAI client and makes available the models for use.\n *\n * Exports:\n * - openai: The main plugin function to interact with OpenAI.\n *\n * Usage:\n * To use the models, initialize the openai plugin inside `configureGenkit` and\n * pass the configuration options. If no API key is provided in the options, the\n * environment variable `OPENAI_API_KEY` must be set.\n *\n * Example:\n * ```\n * import { openAI } from '@genkit-ai/compat-oai/openai';\n *\n * export default configureGenkit({\n * plugins: [\n * openai()\n * ... 
// other plugins\n * ]\n * });\n * ```\n */\nexport const openAI: OpenAIPlugin = Object.assign(openAIPlugin, {\n model,\n embedder,\n});\n\nexport default openAI;\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAiBA,oBAQO;AAIP,mBAKO;AACP,sBAA2C;AAC3C,mBAGO;AACP,eAAgD;AAChD,mBAAwC;AACxC,mBAIO;AACP,IAAAA,mBAGO;AACP,iBAIO;AACP,iBAAkE;AAClE,iBAA2D;AAC3D,qBAKO;AAIP,MAAM,6BAA6B,CAAC,WAAW,WAAW,OAAO;AAEjE,SAAS,eAAe,eAA8B;AACpD,SAAO,OAAO,QAAgB,YAAwB,eAAuB;AAC3E,QAAI,eAAe,YAAY;AAC7B,iBAAO,4CAA2B;AAAA,QAChC,MAAM;AAAA,QACN;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,WACE,WAAW,SAAS,aAAa,KACjC,WAAW,SAAS,QAAQ,GAC5B;AACA,YAAM,eAAW,kCAAoB,EAAE,MAAM,WAAW,CAAC;AACzD,iBAAO,2CAA6B;AAAA,QAClC,MAAM,SAAS;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,WAAW,WAAW,SAAS,KAAK,GAAG;AACrC,YAAM,eAAW,iCAAqB,EAAE,MAAM,WAAW,CAAC;AAC1D,iBAAO,4CAA8B;AAAA,QACnC,MAAM,SAAS;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,WAAW,WAAW,SAAS,SAAS,GAAG;AACzC,YAAM,eAAW,sCAAsB,EAAE,MAAM,WAAW,CAAC;AAC3D,iBAAO,yCAAyB;AAAA,QAC9B,MAAM,SAAS;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,WAAW,WAAW,SAAS,YAAY,GAAG;AAC5C,YAAM,eAAW,wCAA4B;AAAA,QAC3C,MAAM;AAAA,MACR,CAAC;AACD,iBAAO,mDAAqC;AAAA,QAC1C,MAAM,SAAS;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,OAAO;AACL,YAAM,eAAW,2BAAe,EAAE,MAAM,WAAW,CAAC;AACpD,iBAAO,sCAAwB;AAAA,QAC7B,MAAM,SAAS;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AACF;AAEA,SAAS,mBAAmBC,QAA8B;AACxD,SAAO,CAAC,2BAA2B,KAAK,CAAC,MAAMA,OAAM,GAAG,SAAS,CAAC,CAAC;AACrE;AAEA,MAAM,cAAc,OAAO,WAA8C;AACvE,SAAO,MAAM,OAAO,OAAO,KAAK,EAAE;AAAA,IAAK,CAAC,aACtC,SAAS,KAAK,OAAO,kBAAkB,EAAE,IAAI,CAACA,WAAwB;AACpE,UAAIA,OAAM,GAAG,SAAS,WAAW,GAAG;AAClC,mBAAO,sCAAuB;AAAA,UAC5B,MAAMA,OAAM;AAAA,UACZ,cAAc;AAAA,UACd,MAAM,4CAA2BA,OAAM,EAAE,GAAG;AAAA,QAC9C,CAAC;AAAA,MACH,WACEA,OAAM,GAAG,SAAS,aAAa,KAC/BA,OAAM,GAAG,SAAS,QAAQ,GAC1B;AACA,cAAM,WACJ,oCAAuBA,OAAM,EAAE,SAC/B,kCAAoB,EAAE,MAAMA,OAAM,GAAG,CAAC;AACxC,mBAAO,mCAAoB;AAAA,UACzB,MAAM,SAAS;AAAA,UACf,MAAM,SAAS;AAAA,UACf,cAAc,SAA
S;AAAA,QACzB,CAAC;AAAA,MACH,WAAWA,OAAM,GAAG,SAAS,KAAK,GAAG;AACnC,cAAM,WACJ,gCAAqBA,OAAM,EAAE,SAC7B,iCAAqB,EAAE,MAAMA,OAAM,GAAG,CAAC;AACzC,mBAAO,mCAAoB;AAAA,UACzB,MAAM,SAAS;AAAA,UACf,MAAM,SAAS;AAAA,UACf,cAAc,SAAS;AAAA,QACzB,CAAC;AAAA,MACH,WAAWA,OAAM,GAAG,SAAS,SAAS,GAAG;AACvC,cAAM,WACJ,wCAAyBA,OAAM,EAAE,SACjC,sCAAsB,EAAE,MAAMA,OAAM,GAAG,CAAC;AAC1C,mBAAO,mCAAoB;AAAA,UACzB,MAAM,SAAS;AAAA,UACf,MAAM,SAAS;AAAA,UACf,cAAc,SAAS;AAAA,QACzB,CAAC;AAAA,MACH,WAAWA,OAAM,GAAG,SAAS,YAAY,GAAG;AAC1C,cAAM,WACJ,gCAAqBA,OAAM,EAAE,SAC7B,wCAA4B,EAAE,MAAMA,OAAM,GAAG,CAAC;AAChD,mBAAO,mCAAoB;AAAA,UACzB,MAAM,SAAS;AAAA,UACf,MAAM,SAAS;AAAA,UACf,cAAc,SAAS;AAAA,QACzB,CAAC;AAAA,MACH,OAAO;AACL,cAAM,WACJ,gCAAqBA,OAAM,EAAE,SAAK,2BAAe,EAAE,MAAMA,OAAM,GAAG,CAAC;AACrE,mBAAO,mCAAoB;AAAA,UACzB,MAAM,SAAS;AAAA,UACf,MAAM,SAAS;AAAA,UACf,cAAc,SAAS;AAAA,QACzB,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAEO,SAAS,aAAa,SAA+C;AAC1E,QAAM,gBAAgB,EAAE,MAAM,UAAU,GAAG,QAAQ;AACnD,aAAO,2BAAiB;AAAA,IACtB,MAAM;AAAA,IACN,GAAG;AAAA,IACH,aAAa,OAAO,WAAW;AAC7B,YAAM,SAAS,CAAC;AAChB,aAAO;AAAA,QACL,GAAG,OAAO,OAAO,+BAAoB,EAAE;AAAA,UAAI,CAAC,iBAC1C,sCAAwB;AAAA,YACtB,MAAM,SAAS;AAAA,YACf;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AACA,aAAO;AAAA,QACL,GAAG,OAAO,OAAO,2CAA0B,EAAE;AAAA,UAAI,CAACC,qBAChD,4CAA2B;AAAA,YACzB,MAAMA,aAAY;AAAA,YAClB;AAAA,YACA;AAAA,YACA,aAAAA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AACA,aAAO;AAAA,QACL,GAAG,OAAO,OAAO,+BAAoB,EAAE;AAAA,UAAI,CAAC,iBAC1C,4CAA8B;AAAA,YAC5B,MAAM,SAAS;AAAA,YACf;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AACA,aAAO;AAAA,QACL,GAAG,OAAO,OAAO,uCAAwB,EAAE;AAAA,UAAI,CAAC,iBAC9C,yCAAyB;AAAA,YACvB,MAAM,SAAS;AAAA,YACf;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AACA,aAAO;AAAA,QACL,GAAG,OAAO,OAAO,+BAAoB,EAAE;AAAA,UAAI,CAAC,iBAC1C,mDAAqC;AAAA,YACnC,MAAM,SAAS;AAAA,YACf;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AACA,aAAO;AAAA,QACL,GAAG,OAAO,OAAO,mCAAsB,EAAE;AAAA,UAAI,CAAC,iBAC5C,2CAA6B;AAAA,YAC3B,MAAM,SAAS;AAAA,YACf;AAAA,YACA;AAAA,YACA;AAAA,YACA,gBAAgB,SAAS,KAA
K,SAAS,aAAa,IAChD,uCACA;AAAA,UACN,CAAC;AAAA,QACH;AAAA,MACF;AACA,aAAO;AAAA,IACT;AAAA,IACA,UAAU,eAAe,aAAa;AAAA,IACtC;AAAA,EACF,CAAC;AACH;AA2CA,MAAM,QAAS,CAAC,MAAc,WAA+C;AAC3E,MAAI,KAAK,SAAS,aAAa,KAAK,KAAK,SAAS,QAAQ,GAAG;AAC3D,eAAO,kCAAoB;AAAA,MACzB;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AACA,MAAI,KAAK,SAAS,KAAK,GAAG;AACxB,eAAO,iCAAqB;AAAA,MAC1B;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AACA,MAAI,KAAK,SAAS,SAAS,GAAG;AAC5B,eAAO,sCAAsB;AAAA,MAC3B;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AACA,MAAI,KAAK,SAAS,YAAY,GAAG;AAC/B,eAAO,wCAA4B;AAAA,MACjC;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AACA,aAAO,2BAAe;AAAA,IACpB;AAAA,IACA;AAAA,EACF,CAAC;AACH;AAEA,MAAM,WAAY,CAChB,MACA,WACoC;AACpC,aAAO,2BAAY;AAAA,IACjB;AAAA,IACA;AAAA,IACA,cAAc;AAAA,IACd,WAAW;AAAA,EACb,CAAC;AACH;AA+BO,MAAM,SAAuB,OAAO,OAAO,cAAc;AAAA,EAC9D;AAAA,EACA;AACF,CAAC;AAED,IAAO,iBAAQ;","names":["import_embedder","model","embedderRef"]}
@@ -28,6 +28,11 @@ import {
28
28
  } from "./gpt.js";
29
29
  import { openAITranscriptionModelRef, SUPPORTED_STT_MODELS } from "./stt.js";
30
30
  import { openAISpeechModelRef, SUPPORTED_TTS_MODELS } from "./tts.js";
31
+ import {
32
+ defineOpenAIWhisperModel,
33
+ openAIWhisperModelRef,
34
+ SUPPORTED_WHISPER_MODELS
35
+ } from "./whisper.js";
31
36
  const UNSUPPORTED_MODEL_MATCHERS = ["babbage", "davinci", "codex"];
32
37
  function createResolver(pluginOptions) {
33
38
  return async (client, actionType, actionName) => {
@@ -53,7 +58,15 @@ function createResolver(pluginOptions) {
53
58
  pluginOptions,
54
59
  modelRef
55
60
  });
56
- } else if (actionName.includes("whisper") || actionName.includes("transcribe")) {
61
+ } else if (actionName.includes("whisper")) {
62
+ const modelRef = openAIWhisperModelRef({ name: actionName });
63
+ return defineOpenAIWhisperModel({
64
+ name: modelRef.name,
65
+ client,
66
+ pluginOptions,
67
+ modelRef
68
+ });
69
+ } else if (actionName.includes("transcribe")) {
57
70
  const modelRef = openAITranscriptionModelRef({
58
71
  name: actionName
59
72
  });
@@ -100,7 +113,14 @@ const listActions = async (client) => {
100
113
  info: modelRef.info,
101
114
  configSchema: modelRef.configSchema
102
115
  });
103
- } else if (model2.id.includes("whisper") || model2.id.includes("transcribe")) {
116
+ } else if (model2.id.includes("whisper")) {
117
+ const modelRef = SUPPORTED_WHISPER_MODELS[model2.id] ?? openAIWhisperModelRef({ name: model2.id });
118
+ return modelActionMetadata({
119
+ name: modelRef.name,
120
+ info: modelRef.info,
121
+ configSchema: modelRef.configSchema
122
+ });
123
+ } else if (model2.id.includes("transcribe")) {
104
124
  const modelRef = SUPPORTED_STT_MODELS[model2.id] ?? openAITranscriptionModelRef({ name: model2.id });
105
125
  return modelActionMetadata({
106
126
  name: modelRef.name,
@@ -155,6 +175,16 @@ function openAIPlugin(options) {
155
175
  })
156
176
  )
157
177
  );
178
+ models.push(
179
+ ...Object.values(SUPPORTED_WHISPER_MODELS).map(
180
+ (modelRef) => defineOpenAIWhisperModel({
181
+ name: modelRef.name,
182
+ client,
183
+ pluginOptions,
184
+ modelRef
185
+ })
186
+ )
187
+ );
158
188
  models.push(
159
189
  ...Object.values(SUPPORTED_STT_MODELS).map(
160
190
  (modelRef) => defineCompatOpenAITranscriptionModel({
@@ -195,7 +225,13 @@ const model = (name, config) => {
195
225
  config
196
226
  });
197
227
  }
198
- if (name.includes("whisper") || name.includes("transcribe")) {
228
+ if (name.includes("whisper")) {
229
+ return openAIWhisperModelRef({
230
+ name,
231
+ config
232
+ });
233
+ }
234
+ if (name.includes("transcribe")) {
199
235
  return openAITranscriptionModelRef({
200
236
  name,
201
237
  config
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/openai/index.ts"],"sourcesContent":["/**\n * Copyright 2024 The Fire Company\n * Copyright 2024 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n ActionMetadata,\n embedderActionMetadata,\n embedderRef,\n EmbedderReference,\n modelActionMetadata,\n ModelReference,\n z,\n} from 'genkit';\nimport { ResolvableAction, type GenkitPluginV2 } from 'genkit/plugin';\nimport { ActionType } from 'genkit/registry';\nimport OpenAI from 'openai';\nimport {\n defineCompatOpenAISpeechModel,\n defineCompatOpenAITranscriptionModel,\n SpeechConfigSchema,\n TranscriptionConfigSchema,\n} from '../audio.js';\nimport { defineCompatOpenAIEmbedder } from '../embedder.js';\nimport {\n defineCompatOpenAIImageModel,\n ImageGenerationCommonConfigSchema,\n} from '../image.js';\nimport { openAICompatible, PluginOptions } from '../index.js';\nimport { defineCompatOpenAIModel } from '../model.js';\nimport {\n gptImage1RequestBuilder,\n openAIImageModelRef,\n SUPPORTED_IMAGE_MODELS,\n} from './dalle.js';\nimport {\n SUPPORTED_EMBEDDING_MODELS,\n TextEmbeddingConfigSchema,\n} from './embedder.js';\nimport {\n OpenAIChatCompletionConfigSchema,\n openAIModelRef,\n SUPPORTED_GPT_MODELS,\n} from './gpt.js';\nimport { openAITranscriptionModelRef, SUPPORTED_STT_MODELS } from './stt.js';\nimport { openAISpeechModelRef, SUPPORTED_TTS_MODELS } from './tts.js';\n\nexport type OpenAIPluginOptions = Omit<PluginOptions, 'name' | 
'baseURL'>;\n\nconst UNSUPPORTED_MODEL_MATCHERS = ['babbage', 'davinci', 'codex'];\n\nfunction createResolver(pluginOptions: PluginOptions) {\n return async (client: OpenAI, actionType: ActionType, actionName: string) => {\n if (actionType === 'embedder') {\n return defineCompatOpenAIEmbedder({\n name: actionName,\n client,\n pluginOptions,\n });\n } else if (\n actionName.includes('gpt-image-1') ||\n actionName.includes('dall-e')\n ) {\n const modelRef = openAIImageModelRef({ name: actionName });\n return defineCompatOpenAIImageModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n });\n } else if (actionName.includes('tts')) {\n const modelRef = openAISpeechModelRef({ name: actionName });\n return defineCompatOpenAISpeechModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n });\n } else if (\n actionName.includes('whisper') ||\n actionName.includes('transcribe')\n ) {\n const modelRef = openAITranscriptionModelRef({\n name: actionName,\n });\n return defineCompatOpenAITranscriptionModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n });\n } else {\n const modelRef = openAIModelRef({ name: actionName });\n return defineCompatOpenAIModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n });\n }\n };\n}\n\nfunction filterOpenAiModels(model: OpenAI.Model): boolean {\n return !UNSUPPORTED_MODEL_MATCHERS.some((m) => model.id.includes(m));\n}\n\nconst listActions = async (client: OpenAI): Promise<ActionMetadata[]> => {\n return await client.models.list().then((response) =>\n response.data.filter(filterOpenAiModels).map((model: OpenAI.Model) => {\n if (model.id.includes('embedding')) {\n return embedderActionMetadata({\n name: model.id,\n configSchema: TextEmbeddingConfigSchema,\n info: SUPPORTED_EMBEDDING_MODELS[model.id]?.info,\n });\n } else if (\n model.id.includes('gpt-image-1') ||\n model.id.includes('dall-e')\n ) {\n const modelRef =\n SUPPORTED_IMAGE_MODELS[model.id] ??\n openAIImageModelRef({ 
name: model.id });\n return modelActionMetadata({\n name: modelRef.name,\n info: modelRef.info,\n configSchema: modelRef.configSchema,\n });\n } else if (model.id.includes('tts')) {\n const modelRef =\n SUPPORTED_TTS_MODELS[model.id] ??\n openAISpeechModelRef({ name: model.id });\n return modelActionMetadata({\n name: modelRef.name,\n info: modelRef.info,\n configSchema: modelRef.configSchema,\n });\n } else if (\n model.id.includes('whisper') ||\n model.id.includes('transcribe')\n ) {\n const modelRef =\n SUPPORTED_STT_MODELS[model.id] ??\n openAITranscriptionModelRef({ name: model.id });\n return modelActionMetadata({\n name: modelRef.name,\n info: modelRef.info,\n configSchema: modelRef.configSchema,\n });\n } else {\n const modelRef =\n SUPPORTED_GPT_MODELS[model.id] ?? openAIModelRef({ name: model.id });\n return modelActionMetadata({\n name: modelRef.name,\n info: modelRef.info,\n configSchema: modelRef.configSchema,\n });\n }\n })\n );\n};\n\nexport function openAIPlugin(options?: OpenAIPluginOptions): GenkitPluginV2 {\n const pluginOptions = { name: 'openai', ...options };\n return openAICompatible({\n name: 'openai',\n ...options,\n initializer: async (client) => {\n const models = [] as ResolvableAction[];\n models.push(\n ...Object.values(SUPPORTED_GPT_MODELS).map((modelRef) =>\n defineCompatOpenAIModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n })\n )\n );\n models.push(\n ...Object.values(SUPPORTED_EMBEDDING_MODELS).map((embedderRef) =>\n defineCompatOpenAIEmbedder({\n name: embedderRef.name,\n client,\n pluginOptions,\n embedderRef,\n })\n )\n );\n models.push(\n ...Object.values(SUPPORTED_TTS_MODELS).map((modelRef) =>\n defineCompatOpenAISpeechModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n })\n )\n );\n models.push(\n ...Object.values(SUPPORTED_STT_MODELS).map((modelRef) =>\n defineCompatOpenAITranscriptionModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n })\n )\n );\n 
models.push(\n ...Object.values(SUPPORTED_IMAGE_MODELS).map((modelRef) =>\n defineCompatOpenAIImageModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n requestBuilder: modelRef.name.includes('gpt-image-1')\n ? gptImage1RequestBuilder\n : undefined,\n })\n )\n );\n return models;\n },\n resolver: createResolver(pluginOptions),\n listActions,\n });\n}\n\nexport type OpenAIPlugin = {\n (params?: OpenAIPluginOptions): GenkitPluginV2;\n model(\n name:\n | keyof typeof SUPPORTED_IMAGE_MODELS\n | (`dall-e${string}` & {})\n | (`gpt-image-${string}` & {}),\n config?: z.infer<typeof ImageGenerationCommonConfigSchema>\n ): ModelReference<typeof ImageGenerationCommonConfigSchema>;\n model(\n name:\n | keyof typeof SUPPORTED_TTS_MODELS\n | (`tts-${string}` & {})\n | (`${string}-tts` & {}),\n config?: z.infer<typeof SpeechConfigSchema>\n ): ModelReference<typeof SpeechConfigSchema>;\n model(\n name:\n | keyof typeof SUPPORTED_STT_MODELS\n | (`whisper-${string}` & {})\n | (`${string}-transcribe` & {}),\n config?: z.infer<typeof TranscriptionConfigSchema>\n ): ModelReference<typeof TranscriptionConfigSchema>;\n model(\n name:\n | keyof typeof SUPPORTED_GPT_MODELS\n | (`gpt-${string}` & {})\n | (`o${number}` & {}),\n config?: z.infer<typeof OpenAIChatCompletionConfigSchema>\n ): ModelReference<typeof OpenAIChatCompletionConfigSchema>;\n model(name: string, config?: any): ModelReference<z.ZodTypeAny>;\n embedder(\n name:\n | keyof typeof SUPPORTED_EMBEDDING_MODELS\n | (`${string}-embedding-${string}` & {}),\n config?: z.infer<typeof TextEmbeddingConfigSchema>\n ): EmbedderReference<typeof TextEmbeddingConfigSchema>;\n embedder(name: string, config?: any): EmbedderReference<z.ZodTypeAny>;\n};\n\nconst model = ((name: string, config?: any): ModelReference<z.ZodTypeAny> => {\n if (name.includes('gpt-image-1') || name.includes('dall-e')) {\n return openAIImageModelRef({\n name,\n config,\n });\n }\n if (name.includes('tts')) {\n return openAISpeechModelRef({\n name,\n 
config,\n });\n }\n if (name.includes('whisper') || name.includes('transcribe')) {\n return openAITranscriptionModelRef({\n name,\n config,\n });\n }\n return openAIModelRef({\n name,\n config,\n });\n}) as OpenAIPlugin['model'];\n\nconst embedder = ((\n name: string,\n config?: any\n): EmbedderReference<z.ZodTypeAny> => {\n return embedderRef({\n name,\n config,\n configSchema: TextEmbeddingConfigSchema,\n namespace: 'openai',\n });\n}) as OpenAIPlugin['embedder'];\n\n/**\n * This module provides an interface to the OpenAI models through the Genkit\n * plugin system. It allows users to interact with various models by providing\n * an API key and optional configuration.\n *\n * The main export is the `openai` plugin, which can be configured with an API\n * key either directly or through environment variables. It initializes the\n * OpenAI client and makes available the models for use.\n *\n * Exports:\n * - openai: The main plugin function to interact with OpenAI.\n *\n * Usage:\n * To use the models, initialize the openai plugin inside `configureGenkit` and\n * pass the configuration options. If no API key is provided in the options, the\n * environment variable `OPENAI_API_KEY` must be set.\n *\n * Example:\n * ```\n * import { openAI } from '@genkit-ai/compat-oai/openai';\n *\n * export default configureGenkit({\n * plugins: [\n * openai()\n * ... 
// other plugins\n * ]\n * });\n * ```\n */\nexport const openAI: OpenAIPlugin = Object.assign(openAIPlugin, {\n model,\n embedder,\n});\n\nexport default openAI;\n"],"mappings":"AAiBA;AAAA,EAEE;AAAA,EACA;AAAA,EAEA;AAAA,OAGK;AAIP;AAAA,EACE;AAAA,EACA;AAAA,OAGK;AACP,SAAS,kCAAkC;AAC3C;AAAA,EACE;AAAA,OAEK;AACP,SAAS,wBAAuC;AAChD,SAAS,+BAA+B;AACxC;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP;AAAA,EACE;AAAA,EACA;AAAA,OACK;AACP;AAAA,EAEE;AAAA,EACA;AAAA,OACK;AACP,SAAS,6BAA6B,4BAA4B;AAClE,SAAS,sBAAsB,4BAA4B;AAI3D,MAAM,6BAA6B,CAAC,WAAW,WAAW,OAAO;AAEjE,SAAS,eAAe,eAA8B;AACpD,SAAO,OAAO,QAAgB,YAAwB,eAAuB;AAC3E,QAAI,eAAe,YAAY;AAC7B,aAAO,2BAA2B;AAAA,QAChC,MAAM;AAAA,QACN;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,WACE,WAAW,SAAS,aAAa,KACjC,WAAW,SAAS,QAAQ,GAC5B;AACA,YAAM,WAAW,oBAAoB,EAAE,MAAM,WAAW,CAAC;AACzD,aAAO,6BAA6B;AAAA,QAClC,MAAM,SAAS;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,WAAW,WAAW,SAAS,KAAK,GAAG;AACrC,YAAM,WAAW,qBAAqB,EAAE,MAAM,WAAW,CAAC;AAC1D,aAAO,8BAA8B;AAAA,QACnC,MAAM,SAAS;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,WACE,WAAW,SAAS,SAAS,KAC7B,WAAW,SAAS,YAAY,GAChC;AACA,YAAM,WAAW,4BAA4B;AAAA,QAC3C,MAAM;AAAA,MACR,CAAC;AACD,aAAO,qCAAqC;AAAA,QAC1C,MAAM,SAAS;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,OAAO;AACL,YAAM,WAAW,eAAe,EAAE,MAAM,WAAW,CAAC;AACpD,aAAO,wBAAwB;AAAA,QAC7B,MAAM,SAAS;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AACF;AAEA,SAAS,mBAAmBA,QAA8B;AACxD,SAAO,CAAC,2BAA2B,KAAK,CAAC,MAAMA,OAAM,GAAG,SAAS,CAAC,CAAC;AACrE;AAEA,MAAM,cAAc,OAAO,WAA8C;AACvE,SAAO,MAAM,OAAO,OAAO,KAAK,EAAE;AAAA,IAAK,CAAC,aACtC,SAAS,KAAK,OAAO,kBAAkB,EAAE,IAAI,CAACA,WAAwB;AACpE,UAAIA,OAAM,GAAG,SAAS,WAAW,GAAG;AAClC,eAAO,uBAAuB;AAAA,UAC5B,MAAMA,OAAM;AAAA,UACZ,cAAc;AAAA,UACd,MAAM,2BAA2BA,OAAM,EAAE,GAAG;AAAA,QAC9C,CAAC;AAAA,MACH,WACEA,OAAM,GAAG,SAAS,aAAa,KAC/BA,OAAM,GAAG,SAAS,QAAQ,GAC1B;AACA,cAAM,WACJ,uBAAuBA,OAAM,EAAE,KAC/B,oBAAoB,EAAE,MAAMA,OAAM,GAAG,CAAC;AACxC,eAAO,oBAAoB;AAAA,UACzB,MAAM,SAAS;AAAA,UACf,MAAM,SAAS;AAAA,UACf,cAAc,SAAS;AAAA,QACzB,CAAC;AAAA,MAC
H,WAAWA,OAAM,GAAG,SAAS,KAAK,GAAG;AACnC,cAAM,WACJ,qBAAqBA,OAAM,EAAE,KAC7B,qBAAqB,EAAE,MAAMA,OAAM,GAAG,CAAC;AACzC,eAAO,oBAAoB;AAAA,UACzB,MAAM,SAAS;AAAA,UACf,MAAM,SAAS;AAAA,UACf,cAAc,SAAS;AAAA,QACzB,CAAC;AAAA,MACH,WACEA,OAAM,GAAG,SAAS,SAAS,KAC3BA,OAAM,GAAG,SAAS,YAAY,GAC9B;AACA,cAAM,WACJ,qBAAqBA,OAAM,EAAE,KAC7B,4BAA4B,EAAE,MAAMA,OAAM,GAAG,CAAC;AAChD,eAAO,oBAAoB;AAAA,UACzB,MAAM,SAAS;AAAA,UACf,MAAM,SAAS;AAAA,UACf,cAAc,SAAS;AAAA,QACzB,CAAC;AAAA,MACH,OAAO;AACL,cAAM,WACJ,qBAAqBA,OAAM,EAAE,KAAK,eAAe,EAAE,MAAMA,OAAM,GAAG,CAAC;AACrE,eAAO,oBAAoB;AAAA,UACzB,MAAM,SAAS;AAAA,UACf,MAAM,SAAS;AAAA,UACf,cAAc,SAAS;AAAA,QACzB,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAEO,SAAS,aAAa,SAA+C;AAC1E,QAAM,gBAAgB,EAAE,MAAM,UAAU,GAAG,QAAQ;AACnD,SAAO,iBAAiB;AAAA,IACtB,MAAM;AAAA,IACN,GAAG;AAAA,IACH,aAAa,OAAO,WAAW;AAC7B,YAAM,SAAS,CAAC;AAChB,aAAO;AAAA,QACL,GAAG,OAAO,OAAO,oBAAoB,EAAE;AAAA,UAAI,CAAC,aAC1C,wBAAwB;AAAA,YACtB,MAAM,SAAS;AAAA,YACf;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AACA,aAAO;AAAA,QACL,GAAG,OAAO,OAAO,0BAA0B,EAAE;AAAA,UAAI,CAACC,iBAChD,2BAA2B;AAAA,YACzB,MAAMA,aAAY;AAAA,YAClB;AAAA,YACA;AAAA,YACA,aAAAA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AACA,aAAO;AAAA,QACL,GAAG,OAAO,OAAO,oBAAoB,EAAE;AAAA,UAAI,CAAC,aAC1C,8BAA8B;AAAA,YAC5B,MAAM,SAAS;AAAA,YACf;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AACA,aAAO;AAAA,QACL,GAAG,OAAO,OAAO,oBAAoB,EAAE;AAAA,UAAI,CAAC,aAC1C,qCAAqC;AAAA,YACnC,MAAM,SAAS;AAAA,YACf;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AACA,aAAO;AAAA,QACL,GAAG,OAAO,OAAO,sBAAsB,EAAE;AAAA,UAAI,CAAC,aAC5C,6BAA6B;AAAA,YAC3B,MAAM,SAAS;AAAA,YACf;AAAA,YACA;AAAA,YACA;AAAA,YACA,gBAAgB,SAAS,KAAK,SAAS,aAAa,IAChD,0BACA;AAAA,UACN,CAAC;AAAA,QACH;AAAA,MACF;AACA,aAAO;AAAA,IACT;AAAA,IACA,UAAU,eAAe,aAAa;AAAA,IACtC;AAAA,EACF,CAAC;AACH;AA0CA,MAAM,QAAS,CAAC,MAAc,WAA+C;AAC3E,MAAI,KAAK,SAAS,aAAa,KAAK,KAAK,SAAS,QAAQ,GAAG;AAC3D,WAAO,oBAAoB;AAAA,MACzB;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AACA,MAAI,KAAK,SAAS,KAAK,GAAG;AACxB,WAAO,qBAAqB;AAAA,MAC1B;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAC
A,MAAI,KAAK,SAAS,SAAS,KAAK,KAAK,SAAS,YAAY,GAAG;AAC3D,WAAO,4BAA4B;AAAA,MACjC;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AACA,SAAO,eAAe;AAAA,IACpB;AAAA,IACA;AAAA,EACF,CAAC;AACH;AAEA,MAAM,WAAY,CAChB,MACA,WACoC;AACpC,SAAO,YAAY;AAAA,IACjB;AAAA,IACA;AAAA,IACA,cAAc;AAAA,IACd,WAAW;AAAA,EACb,CAAC;AACH;AA+BO,MAAM,SAAuB,OAAO,OAAO,cAAc;AAAA,EAC9D;AAAA,EACA;AACF,CAAC;AAED,IAAO,iBAAQ;","names":["model","embedderRef"]}
1
+ {"version":3,"sources":["../../src/openai/index.ts"],"sourcesContent":["/**\n * Copyright 2024 The Fire Company\n * Copyright 2024 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n ActionMetadata,\n embedderActionMetadata,\n embedderRef,\n EmbedderReference,\n modelActionMetadata,\n ModelReference,\n z,\n} from 'genkit';\nimport { ResolvableAction, type GenkitPluginV2 } from 'genkit/plugin';\nimport { ActionType } from 'genkit/registry';\nimport OpenAI from 'openai';\nimport {\n defineCompatOpenAISpeechModel,\n defineCompatOpenAITranscriptionModel,\n SpeechConfigSchema,\n TranscriptionConfigSchema,\n} from '../audio.js';\nimport { defineCompatOpenAIEmbedder } from '../embedder.js';\nimport {\n defineCompatOpenAIImageModel,\n ImageGenerationCommonConfigSchema,\n} from '../image.js';\nimport { openAICompatible, PluginOptions } from '../index.js';\nimport { defineCompatOpenAIModel } from '../model.js';\nimport {\n gptImage1RequestBuilder,\n openAIImageModelRef,\n SUPPORTED_IMAGE_MODELS,\n} from './dalle.js';\nimport {\n SUPPORTED_EMBEDDING_MODELS,\n TextEmbeddingConfigSchema,\n} from './embedder.js';\nimport {\n OpenAIChatCompletionConfigSchema,\n openAIModelRef,\n SUPPORTED_GPT_MODELS,\n} from './gpt.js';\nimport { openAITranscriptionModelRef, SUPPORTED_STT_MODELS } from './stt.js';\nimport { openAISpeechModelRef, SUPPORTED_TTS_MODELS } from './tts.js';\nimport {\n defineOpenAIWhisperModel,\n openAIWhisperModelRef,\n 
SUPPORTED_WHISPER_MODELS,\n WhisperConfigSchema,\n} from './whisper.js';\n\nexport type OpenAIPluginOptions = Omit<PluginOptions, 'name' | 'baseURL'>;\n\nconst UNSUPPORTED_MODEL_MATCHERS = ['babbage', 'davinci', 'codex'];\n\nfunction createResolver(pluginOptions: PluginOptions) {\n return async (client: OpenAI, actionType: ActionType, actionName: string) => {\n if (actionType === 'embedder') {\n return defineCompatOpenAIEmbedder({\n name: actionName,\n client,\n pluginOptions,\n });\n } else if (\n actionName.includes('gpt-image-1') ||\n actionName.includes('dall-e')\n ) {\n const modelRef = openAIImageModelRef({ name: actionName });\n return defineCompatOpenAIImageModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n });\n } else if (actionName.includes('tts')) {\n const modelRef = openAISpeechModelRef({ name: actionName });\n return defineCompatOpenAISpeechModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n });\n } else if (actionName.includes('whisper')) {\n const modelRef = openAIWhisperModelRef({ name: actionName });\n return defineOpenAIWhisperModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n });\n } else if (actionName.includes('transcribe')) {\n const modelRef = openAITranscriptionModelRef({\n name: actionName,\n });\n return defineCompatOpenAITranscriptionModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n });\n } else {\n const modelRef = openAIModelRef({ name: actionName });\n return defineCompatOpenAIModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n });\n }\n };\n}\n\nfunction filterOpenAiModels(model: OpenAI.Model): boolean {\n return !UNSUPPORTED_MODEL_MATCHERS.some((m) => model.id.includes(m));\n}\n\nconst listActions = async (client: OpenAI): Promise<ActionMetadata[]> => {\n return await client.models.list().then((response) =>\n response.data.filter(filterOpenAiModels).map((model: OpenAI.Model) => {\n if (model.id.includes('embedding')) {\n return 
embedderActionMetadata({\n name: model.id,\n configSchema: TextEmbeddingConfigSchema,\n info: SUPPORTED_EMBEDDING_MODELS[model.id]?.info,\n });\n } else if (\n model.id.includes('gpt-image-1') ||\n model.id.includes('dall-e')\n ) {\n const modelRef =\n SUPPORTED_IMAGE_MODELS[model.id] ??\n openAIImageModelRef({ name: model.id });\n return modelActionMetadata({\n name: modelRef.name,\n info: modelRef.info,\n configSchema: modelRef.configSchema,\n });\n } else if (model.id.includes('tts')) {\n const modelRef =\n SUPPORTED_TTS_MODELS[model.id] ??\n openAISpeechModelRef({ name: model.id });\n return modelActionMetadata({\n name: modelRef.name,\n info: modelRef.info,\n configSchema: modelRef.configSchema,\n });\n } else if (model.id.includes('whisper')) {\n const modelRef =\n SUPPORTED_WHISPER_MODELS[model.id] ??\n openAIWhisperModelRef({ name: model.id });\n return modelActionMetadata({\n name: modelRef.name,\n info: modelRef.info,\n configSchema: modelRef.configSchema,\n });\n } else if (model.id.includes('transcribe')) {\n const modelRef =\n SUPPORTED_STT_MODELS[model.id] ??\n openAITranscriptionModelRef({ name: model.id });\n return modelActionMetadata({\n name: modelRef.name,\n info: modelRef.info,\n configSchema: modelRef.configSchema,\n });\n } else {\n const modelRef =\n SUPPORTED_GPT_MODELS[model.id] ?? 
openAIModelRef({ name: model.id });\n return modelActionMetadata({\n name: modelRef.name,\n info: modelRef.info,\n configSchema: modelRef.configSchema,\n });\n }\n })\n );\n};\n\nexport function openAIPlugin(options?: OpenAIPluginOptions): GenkitPluginV2 {\n const pluginOptions = { name: 'openai', ...options };\n return openAICompatible({\n name: 'openai',\n ...options,\n initializer: async (client) => {\n const models = [] as ResolvableAction[];\n models.push(\n ...Object.values(SUPPORTED_GPT_MODELS).map((modelRef) =>\n defineCompatOpenAIModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n })\n )\n );\n models.push(\n ...Object.values(SUPPORTED_EMBEDDING_MODELS).map((embedderRef) =>\n defineCompatOpenAIEmbedder({\n name: embedderRef.name,\n client,\n pluginOptions,\n embedderRef,\n })\n )\n );\n models.push(\n ...Object.values(SUPPORTED_TTS_MODELS).map((modelRef) =>\n defineCompatOpenAISpeechModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n })\n )\n );\n models.push(\n ...Object.values(SUPPORTED_WHISPER_MODELS).map((modelRef) =>\n defineOpenAIWhisperModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n })\n )\n );\n models.push(\n ...Object.values(SUPPORTED_STT_MODELS).map((modelRef) =>\n defineCompatOpenAITranscriptionModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n })\n )\n );\n models.push(\n ...Object.values(SUPPORTED_IMAGE_MODELS).map((modelRef) =>\n defineCompatOpenAIImageModel({\n name: modelRef.name,\n client,\n pluginOptions,\n modelRef,\n requestBuilder: modelRef.name.includes('gpt-image-1')\n ? 
gptImage1RequestBuilder\n : undefined,\n })\n )\n );\n return models;\n },\n resolver: createResolver(pluginOptions),\n listActions,\n });\n}\n\nexport type OpenAIPlugin = {\n (params?: OpenAIPluginOptions): GenkitPluginV2;\n model(\n name:\n | keyof typeof SUPPORTED_IMAGE_MODELS\n | (`dall-e${string}` & {})\n | (`gpt-image-${string}` & {}),\n config?: z.infer<typeof ImageGenerationCommonConfigSchema>\n ): ModelReference<typeof ImageGenerationCommonConfigSchema>;\n model(\n name:\n | keyof typeof SUPPORTED_TTS_MODELS\n | (`tts-${string}` & {})\n | (`${string}-tts` & {}),\n config?: z.infer<typeof SpeechConfigSchema>\n ): ModelReference<typeof SpeechConfigSchema>;\n model(\n name: keyof typeof SUPPORTED_WHISPER_MODELS | (`whisper-${string}` & {}),\n config?: z.infer<typeof WhisperConfigSchema>\n ): ModelReference<typeof WhisperConfigSchema>;\n model(\n name: keyof typeof SUPPORTED_STT_MODELS | (`${string}-transcribe` & {}),\n config?: z.infer<typeof TranscriptionConfigSchema>\n ): ModelReference<typeof TranscriptionConfigSchema>;\n model(\n name:\n | keyof typeof SUPPORTED_GPT_MODELS\n | (`gpt-${string}` & {})\n | (`o${number}` & {}),\n config?: z.infer<typeof OpenAIChatCompletionConfigSchema>\n ): ModelReference<typeof OpenAIChatCompletionConfigSchema>;\n model(name: string, config?: any): ModelReference<z.ZodTypeAny>;\n embedder(\n name:\n | keyof typeof SUPPORTED_EMBEDDING_MODELS\n | (`${string}-embedding-${string}` & {}),\n config?: z.infer<typeof TextEmbeddingConfigSchema>\n ): EmbedderReference<typeof TextEmbeddingConfigSchema>;\n embedder(name: string, config?: any): EmbedderReference<z.ZodTypeAny>;\n};\n\nconst model = ((name: string, config?: any): ModelReference<z.ZodTypeAny> => {\n if (name.includes('gpt-image-1') || name.includes('dall-e')) {\n return openAIImageModelRef({\n name,\n config,\n });\n }\n if (name.includes('tts')) {\n return openAISpeechModelRef({\n name,\n config,\n });\n }\n if (name.includes('whisper')) {\n return 
openAIWhisperModelRef({\n name,\n config,\n });\n }\n if (name.includes('transcribe')) {\n return openAITranscriptionModelRef({\n name,\n config,\n });\n }\n return openAIModelRef({\n name,\n config,\n });\n}) as OpenAIPlugin['model'];\n\nconst embedder = ((\n name: string,\n config?: any\n): EmbedderReference<z.ZodTypeAny> => {\n return embedderRef({\n name,\n config,\n configSchema: TextEmbeddingConfigSchema,\n namespace: 'openai',\n });\n}) as OpenAIPlugin['embedder'];\n\n/**\n * This module provides an interface to the OpenAI models through the Genkit\n * plugin system. It allows users to interact with various models by providing\n * an API key and optional configuration.\n *\n * The main export is the `openai` plugin, which can be configured with an API\n * key either directly or through environment variables. It initializes the\n * OpenAI client and makes available the models for use.\n *\n * Exports:\n * - openai: The main plugin function to interact with OpenAI.\n *\n * Usage:\n * To use the models, initialize the openai plugin inside `configureGenkit` and\n * pass the configuration options. If no API key is provided in the options, the\n * environment variable `OPENAI_API_KEY` must be set.\n *\n * Example:\n * ```\n * import { openAI } from '@genkit-ai/compat-oai/openai';\n *\n * export default configureGenkit({\n * plugins: [\n * openai()\n * ... 
// other plugins\n * ]\n * });\n * ```\n */\nexport const openAI: OpenAIPlugin = Object.assign(openAIPlugin, {\n model,\n embedder,\n});\n\nexport default openAI;\n"],"mappings":"AAiBA;AAAA,EAEE;AAAA,EACA;AAAA,EAEA;AAAA,OAGK;AAIP;AAAA,EACE;AAAA,EACA;AAAA,OAGK;AACP,SAAS,kCAAkC;AAC3C;AAAA,EACE;AAAA,OAEK;AACP,SAAS,wBAAuC;AAChD,SAAS,+BAA+B;AACxC;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP;AAAA,EACE;AAAA,EACA;AAAA,OACK;AACP;AAAA,EAEE;AAAA,EACA;AAAA,OACK;AACP,SAAS,6BAA6B,4BAA4B;AAClE,SAAS,sBAAsB,4BAA4B;AAC3D;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OAEK;AAIP,MAAM,6BAA6B,CAAC,WAAW,WAAW,OAAO;AAEjE,SAAS,eAAe,eAA8B;AACpD,SAAO,OAAO,QAAgB,YAAwB,eAAuB;AAC3E,QAAI,eAAe,YAAY;AAC7B,aAAO,2BAA2B;AAAA,QAChC,MAAM;AAAA,QACN;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,WACE,WAAW,SAAS,aAAa,KACjC,WAAW,SAAS,QAAQ,GAC5B;AACA,YAAM,WAAW,oBAAoB,EAAE,MAAM,WAAW,CAAC;AACzD,aAAO,6BAA6B;AAAA,QAClC,MAAM,SAAS;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,WAAW,WAAW,SAAS,KAAK,GAAG;AACrC,YAAM,WAAW,qBAAqB,EAAE,MAAM,WAAW,CAAC;AAC1D,aAAO,8BAA8B;AAAA,QACnC,MAAM,SAAS;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,WAAW,WAAW,SAAS,SAAS,GAAG;AACzC,YAAM,WAAW,sBAAsB,EAAE,MAAM,WAAW,CAAC;AAC3D,aAAO,yBAAyB;AAAA,QAC9B,MAAM,SAAS;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,WAAW,WAAW,SAAS,YAAY,GAAG;AAC5C,YAAM,WAAW,4BAA4B;AAAA,QAC3C,MAAM;AAAA,MACR,CAAC;AACD,aAAO,qCAAqC;AAAA,QAC1C,MAAM,SAAS;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,OAAO;AACL,YAAM,WAAW,eAAe,EAAE,MAAM,WAAW,CAAC;AACpD,aAAO,wBAAwB;AAAA,QAC7B,MAAM,SAAS;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AACF;AAEA,SAAS,mBAAmBA,QAA8B;AACxD,SAAO,CAAC,2BAA2B,KAAK,CAAC,MAAMA,OAAM,GAAG,SAAS,CAAC,CAAC;AACrE;AAEA,MAAM,cAAc,OAAO,WAA8C;AACvE,SAAO,MAAM,OAAO,OAAO,KAAK,EAAE;AAAA,IAAK,CAAC,aACtC,SAAS,KAAK,OAAO,kBAAkB,EAAE,IAAI,CAACA,WAAwB;AACpE,UAAIA,OAAM,GAAG,SAAS,WAAW,GAAG;AAClC,eAAO,uBAAuB;AAAA,UAC5B,MAAMA,OAAM;AAAA,UACZ,cAAc;AAAA,UACd,MAAM,2BAA2BA,OAAM,EAAE,GAAG;AAAA,QAC9C,CAAC;AAAA,MACH,WACEA,OAAM,GAAG,SAAS,aAAa,KAC/BA,OAAM,GAAG,SAAS,
QAAQ,GAC1B;AACA,cAAM,WACJ,uBAAuBA,OAAM,EAAE,KAC/B,oBAAoB,EAAE,MAAMA,OAAM,GAAG,CAAC;AACxC,eAAO,oBAAoB;AAAA,UACzB,MAAM,SAAS;AAAA,UACf,MAAM,SAAS;AAAA,UACf,cAAc,SAAS;AAAA,QACzB,CAAC;AAAA,MACH,WAAWA,OAAM,GAAG,SAAS,KAAK,GAAG;AACnC,cAAM,WACJ,qBAAqBA,OAAM,EAAE,KAC7B,qBAAqB,EAAE,MAAMA,OAAM,GAAG,CAAC;AACzC,eAAO,oBAAoB;AAAA,UACzB,MAAM,SAAS;AAAA,UACf,MAAM,SAAS;AAAA,UACf,cAAc,SAAS;AAAA,QACzB,CAAC;AAAA,MACH,WAAWA,OAAM,GAAG,SAAS,SAAS,GAAG;AACvC,cAAM,WACJ,yBAAyBA,OAAM,EAAE,KACjC,sBAAsB,EAAE,MAAMA,OAAM,GAAG,CAAC;AAC1C,eAAO,oBAAoB;AAAA,UACzB,MAAM,SAAS;AAAA,UACf,MAAM,SAAS;AAAA,UACf,cAAc,SAAS;AAAA,QACzB,CAAC;AAAA,MACH,WAAWA,OAAM,GAAG,SAAS,YAAY,GAAG;AAC1C,cAAM,WACJ,qBAAqBA,OAAM,EAAE,KAC7B,4BAA4B,EAAE,MAAMA,OAAM,GAAG,CAAC;AAChD,eAAO,oBAAoB;AAAA,UACzB,MAAM,SAAS;AAAA,UACf,MAAM,SAAS;AAAA,UACf,cAAc,SAAS;AAAA,QACzB,CAAC;AAAA,MACH,OAAO;AACL,cAAM,WACJ,qBAAqBA,OAAM,EAAE,KAAK,eAAe,EAAE,MAAMA,OAAM,GAAG,CAAC;AACrE,eAAO,oBAAoB;AAAA,UACzB,MAAM,SAAS;AAAA,UACf,MAAM,SAAS;AAAA,UACf,cAAc,SAAS;AAAA,QACzB,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAEO,SAAS,aAAa,SAA+C;AAC1E,QAAM,gBAAgB,EAAE,MAAM,UAAU,GAAG,QAAQ;AACnD,SAAO,iBAAiB;AAAA,IACtB,MAAM;AAAA,IACN,GAAG;AAAA,IACH,aAAa,OAAO,WAAW;AAC7B,YAAM,SAAS,CAAC;AAChB,aAAO;AAAA,QACL,GAAG,OAAO,OAAO,oBAAoB,EAAE;AAAA,UAAI,CAAC,aAC1C,wBAAwB;AAAA,YACtB,MAAM,SAAS;AAAA,YACf;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AACA,aAAO;AAAA,QACL,GAAG,OAAO,OAAO,0BAA0B,EAAE;AAAA,UAAI,CAACC,iBAChD,2BAA2B;AAAA,YACzB,MAAMA,aAAY;AAAA,YAClB;AAAA,YACA;AAAA,YACA,aAAAA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AACA,aAAO;AAAA,QACL,GAAG,OAAO,OAAO,oBAAoB,EAAE;AAAA,UAAI,CAAC,aAC1C,8BAA8B;AAAA,YAC5B,MAAM,SAAS;AAAA,YACf;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AACA,aAAO;AAAA,QACL,GAAG,OAAO,OAAO,wBAAwB,EAAE;AAAA,UAAI,CAAC,aAC9C,yBAAyB;AAAA,YACvB,MAAM,SAAS;AAAA,YACf;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AACA,aAAO;AAAA,QACL,GAAG,OAAO,OAAO,oBAAoB,EAAE;AAAA,UAAI,CAAC,aAC1C,qCAAqC;AAAA,YACnC,MAAM,SAAS;AAAA,YACf;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;A
ACA,aAAO;AAAA,QACL,GAAG,OAAO,OAAO,sBAAsB,EAAE;AAAA,UAAI,CAAC,aAC5C,6BAA6B;AAAA,YAC3B,MAAM,SAAS;AAAA,YACf;AAAA,YACA;AAAA,YACA;AAAA,YACA,gBAAgB,SAAS,KAAK,SAAS,aAAa,IAChD,0BACA;AAAA,UACN,CAAC;AAAA,QACH;AAAA,MACF;AACA,aAAO;AAAA,IACT;AAAA,IACA,UAAU,eAAe,aAAa;AAAA,IACtC;AAAA,EACF,CAAC;AACH;AA2CA,MAAM,QAAS,CAAC,MAAc,WAA+C;AAC3E,MAAI,KAAK,SAAS,aAAa,KAAK,KAAK,SAAS,QAAQ,GAAG;AAC3D,WAAO,oBAAoB;AAAA,MACzB;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AACA,MAAI,KAAK,SAAS,KAAK,GAAG;AACxB,WAAO,qBAAqB;AAAA,MAC1B;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AACA,MAAI,KAAK,SAAS,SAAS,GAAG;AAC5B,WAAO,sBAAsB;AAAA,MAC3B;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AACA,MAAI,KAAK,SAAS,YAAY,GAAG;AAC/B,WAAO,4BAA4B;AAAA,MACjC;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AACA,SAAO,eAAe;AAAA,IACpB;AAAA,IACA;AAAA,EACF,CAAC;AACH;AAEA,MAAM,WAAY,CAChB,MACA,WACoC;AACpC,SAAO,YAAY;AAAA,IACjB;AAAA,IACA;AAAA,IACA,cAAc;AAAA,IACd,WAAW;AAAA,EACb,CAAC;AACH;AA+BO,MAAM,SAAuB,OAAO,OAAO,cAAc;AAAA,EAC9D;AAAA,EACA;AACF,CAAC;AAED,IAAO,iBAAQ;","names":["model","embedderRef"]}
@@ -12,7 +12,6 @@ declare function openAITranscriptionModelRef<CustomOptions extends z.ZodTypeAny
12
12
  declare const SUPPORTED_STT_MODELS: {
13
13
  'gpt-4o-transcribe': genkit.ModelReference<any>;
14
14
  'gpt-4o-mini-transcribe': genkit.ModelReference<any>;
15
- 'whisper-1': genkit.ModelReference<any>;
16
15
  };
17
16
 
18
17
  export { SUPPORTED_STT_MODELS, openAITranscriptionModelRef };
@@ -12,7 +12,6 @@ declare function openAITranscriptionModelRef<CustomOptions extends z.ZodTypeAny
12
12
  declare const SUPPORTED_STT_MODELS: {
13
13
  'gpt-4o-transcribe': genkit.ModelReference<any>;
14
14
  'gpt-4o-mini-transcribe': genkit.ModelReference<any>;
15
- 'whisper-1': genkit.ModelReference<any>;
16
15
  };
17
16
 
18
17
  export { SUPPORTED_STT_MODELS, openAITranscriptionModelRef };
package/lib/openai/stt.js CHANGED
@@ -32,9 +32,6 @@ const SUPPORTED_STT_MODELS = {
32
32
  }),
33
33
  "gpt-4o-mini-transcribe": openAITranscriptionModelRef({
34
34
  name: "gpt-4o-mini-transcribe"
35
- }),
36
- "whisper-1": openAITranscriptionModelRef({
37
- name: "whisper-1"
38
35
  })
39
36
  };
40
37
  // Annotate the CommonJS export names for ESM import in node:
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/openai/stt.ts"],"sourcesContent":["/**\n * Copyright 2024 The Fire Company\n * Copyright 2024 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { z } from 'genkit';\nimport { ModelInfo } from 'genkit/model';\nimport { compatOaiTranscriptionModelRef } from '../audio';\n\n/** OpenAI transcription ModelRef helper, same as the OpenAI-compatible spec. */\nexport function openAITranscriptionModelRef<\n CustomOptions extends z.ZodTypeAny = z.ZodTypeAny,\n>(params: {\n name: string;\n info?: ModelInfo;\n configSchema?: CustomOptions;\n config?: any;\n}) {\n return compatOaiTranscriptionModelRef({ ...params, namespace: 'openai' });\n}\n\nexport const SUPPORTED_STT_MODELS = {\n 'gpt-4o-transcribe': openAITranscriptionModelRef({\n name: 'gpt-4o-transcribe',\n }),\n 'gpt-4o-mini-transcribe': openAITranscriptionModelRef({\n name: 'gpt-4o-mini-transcribe',\n }),\n 'whisper-1': openAITranscriptionModelRef({\n name: 'whisper-1',\n }),\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAmBA,mBAA+C;AAGxC,SAAS,4BAEd,QAKC;AACD,aAAO,6CAA+B,EAAE,GAAG,QAAQ,WAAW,SAAS,CAAC;AAC1E;AAEO,MAAM,uBAAuB;AAAA,EAClC,qBAAqB,4BAA4B;AAAA,IAC/C,MAAM;AAAA,EACR,CAAC;AAAA,EACD,0BAA0B,4BAA4B;AAAA,IACpD,MAAM;AAAA,EACR,CAAC;AAAA,EACD,aAAa,4BAA4B;AAAA,IACvC,MAAM;AAAA,EACR,CAAC;AACH;","names":[]}
1
+ {"version":3,"sources":["../../src/openai/stt.ts"],"sourcesContent":["/**\n * Copyright 2024 The Fire Company\n * Copyright 2024 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { z } from 'genkit';\nimport { ModelInfo } from 'genkit/model';\nimport { compatOaiTranscriptionModelRef } from '../audio';\n\n/** OpenAI transcription ModelRef helper, same as the OpenAI-compatible spec. */\nexport function openAITranscriptionModelRef<\n CustomOptions extends z.ZodTypeAny = z.ZodTypeAny,\n>(params: {\n name: string;\n info?: ModelInfo;\n configSchema?: CustomOptions;\n config?: any;\n}) {\n return compatOaiTranscriptionModelRef({ ...params, namespace: 'openai' });\n}\n\nexport const SUPPORTED_STT_MODELS = {\n 'gpt-4o-transcribe': openAITranscriptionModelRef({\n name: 'gpt-4o-transcribe',\n }),\n 'gpt-4o-mini-transcribe': openAITranscriptionModelRef({\n name: 'gpt-4o-mini-transcribe',\n }),\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAmBA,mBAA+C;AAGxC,SAAS,4BAEd,QAKC;AACD,aAAO,6CAA+B,EAAE,GAAG,QAAQ,WAAW,SAAS,CAAC;AAC1E;AAEO,MAAM,uBAAuB;AAAA,EAClC,qBAAqB,4BAA4B;AAAA,IAC/C,MAAM;AAAA,EACR,CAAC;AAAA,EACD,0BAA0B,4BAA4B;AAAA,IACpD,MAAM;AAAA,EACR,CAAC;AACH;","names":[]}
@@ -8,9 +8,6 @@ const SUPPORTED_STT_MODELS = {
8
8
  }),
9
9
  "gpt-4o-mini-transcribe": openAITranscriptionModelRef({
10
10
  name: "gpt-4o-mini-transcribe"
11
- }),
12
- "whisper-1": openAITranscriptionModelRef({
13
- name: "whisper-1"
14
11
  })
15
12
  };
16
13
  export {
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/openai/stt.ts"],"sourcesContent":["/**\n * Copyright 2024 The Fire Company\n * Copyright 2024 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { z } from 'genkit';\nimport { ModelInfo } from 'genkit/model';\nimport { compatOaiTranscriptionModelRef } from '../audio';\n\n/** OpenAI transcription ModelRef helper, same as the OpenAI-compatible spec. */\nexport function openAITranscriptionModelRef<\n CustomOptions extends z.ZodTypeAny = z.ZodTypeAny,\n>(params: {\n name: string;\n info?: ModelInfo;\n configSchema?: CustomOptions;\n config?: any;\n}) {\n return compatOaiTranscriptionModelRef({ ...params, namespace: 'openai' });\n}\n\nexport const SUPPORTED_STT_MODELS = {\n 'gpt-4o-transcribe': openAITranscriptionModelRef({\n name: 'gpt-4o-transcribe',\n }),\n 'gpt-4o-mini-transcribe': openAITranscriptionModelRef({\n name: 'gpt-4o-mini-transcribe',\n }),\n 'whisper-1': openAITranscriptionModelRef({\n name: 'whisper-1',\n }),\n};\n"],"mappings":"AAmBA,SAAS,sCAAsC;AAGxC,SAAS,4BAEd,QAKC;AACD,SAAO,+BAA+B,EAAE,GAAG,QAAQ,WAAW,SAAS,CAAC;AAC1E;AAEO,MAAM,uBAAuB;AAAA,EAClC,qBAAqB,4BAA4B;AAAA,IAC/C,MAAM;AAAA,EACR,CAAC;AAAA,EACD,0BAA0B,4BAA4B;AAAA,IACpD,MAAM;AAAA,EACR,CAAC;AAAA,EACD,aAAa,4BAA4B;AAAA,IACvC,MAAM;AAAA,EACR,CAAC;AACH;","names":[]}
1
+ {"version":3,"sources":["../../src/openai/stt.ts"],"sourcesContent":["/**\n * Copyright 2024 The Fire Company\n * Copyright 2024 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { z } from 'genkit';\nimport { ModelInfo } from 'genkit/model';\nimport { compatOaiTranscriptionModelRef } from '../audio';\n\n/** OpenAI transcription ModelRef helper, same as the OpenAI-compatible spec. */\nexport function openAITranscriptionModelRef<\n CustomOptions extends z.ZodTypeAny = z.ZodTypeAny,\n>(params: {\n name: string;\n info?: ModelInfo;\n configSchema?: CustomOptions;\n config?: any;\n}) {\n return compatOaiTranscriptionModelRef({ ...params, namespace: 'openai' });\n}\n\nexport const SUPPORTED_STT_MODELS = {\n 'gpt-4o-transcribe': openAITranscriptionModelRef({\n name: 'gpt-4o-transcribe',\n }),\n 'gpt-4o-mini-transcribe': openAITranscriptionModelRef({\n name: 'gpt-4o-mini-transcribe',\n }),\n};\n"],"mappings":"AAmBA,SAAS,sCAAsC;AAGxC,SAAS,4BAEd,QAKC;AACD,SAAO,+BAA+B,EAAE,GAAG,QAAQ,WAAW,SAAS,CAAC;AAC1E;AAEO,MAAM,uBAAuB;AAAA,EAClC,qBAAqB,4BAA4B;AAAA,IAC/C,MAAM;AAAA,EACR,CAAC;AAAA,EACD,0BAA0B,4BAA4B;AAAA,IACpD,MAAM;AAAA,EACR,CAAC;AACH;","names":[]}