@modelence/ai 0.1.1-dev.0 → 0.1.1-dev.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +1 -1
- package/dist/index.js.map +1 -1
- package/package.json +2 -2
- package/src/index.ts +20 -7
package/dist/index.js
CHANGED
@@ -1,2 +1,2 @@
-import {getConfig}from'modelence/server';import {generateText}from'ai';import {createOpenAI}from'@ai-sdk/openai';import {createAnthropic}from'@ai-sdk/anthropic';import {createGoogleGenerativeAI}from'@ai-sdk/google';function
+import {getConfig}from'modelence/server';import {startTransaction,captureError}from'modelence/telemetry';import {generateText}from'ai';import {createOpenAI}from'@ai-sdk/openai';import {createAnthropic}from'@ai-sdk/anthropic';import {createGoogleGenerativeAI}from'@ai-sdk/google';function d(e,r){switch(e){case "openai":return createOpenAI({apiKey:String(getConfig("_system.openai.apiKey"))})(r);case "anthropic":return createAnthropic({apiKey:String(getConfig("_system.anthropic.apiKey"))})(r);case "google":return createGoogleGenerativeAI({apiKey:String(getConfig("_system.google.apiKey"))})(r);default:throw new Error(`Unsupported provider: ${e}`)}}async function G(e){let{provider:r,model:o,...i}=e,n=startTransaction("ai","ai:generateText",{provider:r,model:o,messageCount:Array.isArray(e.messages)?e.messages.length:0,temperature:e.temperature});try{let t=await generateText({model:d(r,o),...i});return n.end(),t}catch(t){throw captureError(t),n.end("error"),t}}export{G as generateText};//# sourceMappingURL=index.js.map
 //# sourceMappingURL=index.js.map
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../src/index.ts"],"names":["getProviderModel","provider","model","createOpenAI","getConfig","createAnthropic","createGoogleGenerativeAI","generateText","options","restOptions","
+{"version":3,"sources":["../src/index.ts"],"names":["getProviderModel","provider","model","createOpenAI","getConfig","createAnthropic","createGoogleGenerativeAI","generateText","options","restOptions","transaction","startTransaction","result","originalGenerateText","error","captureError"],"mappings":"uRA4BA,SAASA,CAAiBC,CAAAA,CAAAA,CAAoBC,CAAe,CAAA,CAC3D,OAAQD,CAAU,EAChB,KAAK,QAAA,CACH,OAAOE,YAAa,CAAA,CAClB,MAAQ,CAAA,MAAA,CAAOC,UAAU,uBAAuB,CAAC,CACnD,CAAC,EAAEF,CAAK,CAAA,CAEV,KAAK,WAAA,CACH,OAAOG,eAAgB,CAAA,CACrB,MAAQ,CAAA,MAAA,CAAOD,UAAU,0BAA0B,CAAC,CACtD,CAAC,EAAEF,CAAK,CAAA,CAEV,KAAK,QAAA,CACH,OAAOI,wBAAyB,CAAA,CAC9B,MAAQ,CAAA,MAAA,CAAOF,UAAU,uBAAuB,CAAC,CACnD,CAAC,CAAA,CAAEF,CAAK,CAEV,CAAA,QACE,MAAM,IAAI,MAAM,CAAyBD,sBAAAA,EAAAA,CAAQ,CAAE,CAAA,CACvD,CACF,CA2BA,eAAsBM,CAAaC,CAAAA,CAAAA,CAA8B,CAC/D,GAAM,CAAE,QAAAP,CAAAA,CAAAA,CAAU,MAAAC,CAAO,CAAA,GAAGO,CAAY,CAAA,CAAID,EAEtCE,CAAcC,CAAAA,gBAAAA,CAAiB,IAAM,CAAA,iBAAA,CAAmB,CAC5D,QAAAV,CAAAA,CAAAA,CACA,KAAAC,CAAAA,CAAAA,CACA,aAAc,KAAM,CAAA,OAAA,CAAQM,EAAQ,QAAQ,CAAA,CAAIA,EAAQ,QAAS,CAAA,MAAA,CAAS,CAC1E,CAAA,WAAA,CAAaA,EAAQ,WACvB,CAAC,CAED,CAAA,GAAI,CACF,IAAMI,CAAAA,CAAS,MAAMC,YAAAA,CAAqB,CACxC,KAAOb,CAAAA,CAAAA,CAAiBC,CAAUC,CAAAA,CAAK,EACvC,GAAGO,CACL,CAAC,CAAA,CAED,OAAAC,CAAY,CAAA,GAAA,EACLE,CAAAA,CACT,OAASE,CAAO,CAAA,CACd,MAAAC,YAAAA,CAAaD,CAAc,CAC3BJ,CAAAA,CAAAA,CAAY,IAAI,OAAO,CAAA,CACjBI,CACR,CACF","file":"index.js","sourcesContent":["import { getConfig } from 'modelence/server';\nimport { startTransaction, captureError } from 'modelence/telemetry';\nimport { generateText as originalGenerateText } from 'ai';\nimport { createOpenAI } from '@ai-sdk/openai';\nimport { createAnthropic } from '@ai-sdk/anthropic';\nimport { createGoogleGenerativeAI } from '@ai-sdk/google';\n\n/**\n * Supported AI providers for text generation.\n */\ntype Provider = 'openai' | 'anthropic' | 'google';\n\n// Extract the original generateText parameters and override the model property\ntype OriginalGenerateTextParams = Parameters<typeof originalGenerateText>[0];\n\n/**\n * Options for the Modelence generateText function.\n * \n * This interface extends all the standard AI SDK generateText options,\n * but replaces the model parameter with separate provider and model parameters.\n */\nexport interface GenerateTextOptions extends Omit<OriginalGenerateTextParams, 'model'> {\n /** The AI provider name */\n provider: Provider;\n /** The specific model name */\n model: string;\n}\n\nfunction getProviderModel(provider: Provider, model: string) {\n switch (provider) {\n case 'openai':\n return createOpenAI({\n apiKey: String(getConfig('_system.openai.apiKey')),\n })(model);\n \n case 'anthropic':\n return createAnthropic({\n apiKey: String(getConfig('_system.anthropic.apiKey')),\n })(model);\n \n case 'google':\n return createGoogleGenerativeAI({\n apiKey: String(getConfig('_system.google.apiKey')),\n })(model);\n \n default:\n throw new Error(`Unsupported provider: ${provider}`);\n }\n}\n\n/**\n * Generates text using AI models with built-in Modelence configuration and telemetry.\n * \n * This is a wrapper around the AI SDK's generateText function that automatically\n * configures providers using Modelence's server-side configuration system.\n * \n * @param options - Configuration options for text generation\n * @returns A promise that resolves to the generated text result\n * \n * @example\n * ```typescript\n * import { generateText } from '@modelence/ai';\n * \n * const response = await generateText({\n * provider: 'openai',\n * model: 'gpt-4o',\n * messages: [\n * { role: 'user', content: 'Write a haiku about programming' }\n * ],\n * temperature: 0.7\n * });\n * \n * console.log(response.text);\n * ```\n */\nexport async function generateText(options: GenerateTextOptions) {\n const { provider, model, ...restOptions } = options;\n \n const transaction = startTransaction('ai', 'ai:generateText', {\n provider, \n model,\n messageCount: Array.isArray(options.messages) ? options.messages.length : 0,\n temperature: options.temperature\n });\n\n try {\n const result = await originalGenerateText({\n model: getProviderModel(provider, model),\n ...restOptions,\n });\n \n transaction.end();\n return result;\n } catch (error) {\n captureError(error as Error);\n transaction.end('error');\n throw error;\n }\n}\n"]}
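
The new source map inlines the full TypeScript source, including a JSDoc usage example for the wrapper. Reproduced here in readable form from that sourcesContent (it assumes an ESM context where top-level await is available):

    import { generateText } from '@modelence/ai';

    const response = await generateText({
      provider: 'openai',
      model: 'gpt-4o',
      messages: [
        { role: 'user', content: 'Write a haiku about programming' }
      ],
      temperature: 0.7
    });

    console.log(response.text);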
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "type": "module",
   "name": "@modelence/ai",
-  "version": "0.1.1-dev.0",
+  "version": "0.1.1-dev.3",
   "description": "Modelence AI engine",
   "exports": {
     ".": {
@@ -23,7 +23,7 @@
     "@ai-sdk/google": "^1.2.19",
     "@ai-sdk/openai": "^1.3.22",
     "ai": "^4.3.16",
-    "modelence": "^0.5.
+    "modelence": "^0.5.1-dev.2"
   },
   "devDependencies": {
     "tsup": "^8.3.6",
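
The modelence dependency moves to a prerelease range in step with the new telemetry import shown in src/index.ts below; the assumption here is that the modelence/telemetry subpath export is only available from that prerelease line onward, so both subpath imports must resolve from the same installed modelence version:

    // Sketch of the resolution requirement implied by the bump: a modelence
    // install without the telemetry subpath would fail at import time.
    import { getConfig } from 'modelence/server';
    import { startTransaction, captureError } from 'modelence/telemetry';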
package/src/index.ts
CHANGED
@@ -1,4 +1,5 @@
 import { getConfig } from 'modelence/server';
+import { startTransaction, captureError } from 'modelence/telemetry';
 import { generateText as originalGenerateText } from 'ai';
 import { createOpenAI } from '@ai-sdk/openai';
 import { createAnthropic } from '@ai-sdk/anthropic';
@@ -75,12 +76,24 @@ function getProviderModel(provider: Provider, model: string) {
 export async function generateText(options: GenerateTextOptions) {
   const { provider, model, ...restOptions } = options;
 
-
-
-
-
-
-    model: providerModel,
-    ...restOptions,
+  const transaction = startTransaction('ai', 'ai:generateText', {
+    provider,
+    model,
+    messageCount: Array.isArray(options.messages) ? options.messages.length : 0,
+    temperature: options.temperature
   });
+
+  try {
+    const result = await originalGenerateText({
+      model: getProviderModel(provider, model),
+      ...restOptions,
+    });
+
+    transaction.end();
+    return result;
+  } catch (error) {
+    captureError(error as Error);
+    transaction.end('error');
+    throw error;
+  }
 }
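
Because the wrapper reports failures via captureError and transaction.end('error') before rethrowing, callers still receive the original error and only need application-level handling. A minimal caller-side sketch under that assumption (the model id, prompt, and fallback behavior are illustrative, not part of the package):

    import { generateText } from '@modelence/ai';

    async function summarize(input: string): Promise<string | null> {
      try {
        const { text } = await generateText({
          provider: 'anthropic',             // one of 'openai' | 'anthropic' | 'google'
          model: 'claude-3-5-sonnet-latest', // illustrative model id
          messages: [{ role: 'user', content: `Summarize in one sentence:\n${input}` }],
        });
        return text;
      } catch (error) {
        // Telemetry has already captured this failure inside the wrapper,
        // so only application-level recovery is needed here.
        console.error('generateText failed', error);
        return null;
      }
    }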