@output.ai/llm 0.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +30 -0
- package/package.json +21 -0
- package/src/ai-sdk.js +49 -0
- package/src/index.d.ts +33 -0
- package/src/index.js +2 -0
- package/src/schema.js +8 -0
package/README.md
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
# LLM Module
|
|
2
|
+
|
|
3
|
+
Provides abstractions for LLM calls.
|
|
4
|
+
|
|
5
|
+
## completions()
|
|
6
|
+
|
|
7
|
+
Allows you to use chat messages with an LLM model.
|
|
8
|
+
|
|
9
|
+
```js
|
|
10
|
+
import { generateText } from '@output.ai/llm';
|
|
11
|
+
|
|
12
|
+
const response = generateText({
|
|
13
|
+
config: {
|
|
14
|
+
model: 'model-name', // eg claude-3.5
|
|
15
|
+
provider: 'provider-name', // eg anthropic
|
|
16
|
+
},
|
|
17
|
+
messages: [
|
|
18
|
+
{
|
|
19
|
+
role: 'system',
|
|
20
|
+
content: 'You are an assistant...',
|
|
21
|
+
},
|
|
22
|
+
{
|
|
23
|
+
role: 'user',
|
|
24
|
+
content: 'What is the capital of Nicaragua?',
|
|
25
|
+
},
|
|
26
|
+
],
|
|
27
|
+
});
|
|
28
|
+
```
|
|
29
|
+
|
|
30
|
+
The response is a string.
|
package/package.json
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@output.ai/llm",
|
|
3
|
+
"version": "0.0.7",
|
|
4
|
+
"description": "Framework abstraction to interact with LLM models",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "src/index.js",
|
|
7
|
+
"types": "src/index.d.ts",
|
|
8
|
+
"files": [
|
|
9
|
+
"./src"
|
|
10
|
+
],
|
|
11
|
+
"repository": {
|
|
12
|
+
"type": "git",
|
|
13
|
+
"url": "git+https://github.com/growthxai/flow-sdk"
|
|
14
|
+
},
|
|
15
|
+
"dependencies": {
|
|
16
|
+
"@ai-sdk/anthropic": "2.0.4",
|
|
17
|
+
"@ai-sdk/openai": "2.0.18",
|
|
18
|
+
"@output.ai/trace": "0.0.1",
|
|
19
|
+
"ai": "5.0.15"
|
|
20
|
+
}
|
|
21
|
+
}
|
package/src/ai-sdk.js
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
import {
|
|
2
|
+
generateText as aiGenerateText,
|
|
3
|
+
generateObject as aiGenerateObject
|
|
4
|
+
} from 'ai';
|
|
5
|
+
import { anthropic } from '@ai-sdk/anthropic';
|
|
6
|
+
import { openai } from '@ai-sdk/openai';
|
|
7
|
+
import { trace } from '@output.ai/trace';
|
|
8
|
+
|
|
9
|
+
const providers = {
|
|
10
|
+
anthropic,
|
|
11
|
+
openai
|
|
12
|
+
};
|
|
13
|
+
|
|
14
|
+
/**
 * Generates a plain-text completion for a chat-style prompt.
 *
 * @param {object} prompt - Prompt descriptor.
 * @param {object} prompt.config - Model configuration.
 * @param {string} prompt.config.provider - Provider key ('anthropic' | 'openai').
 * @param {string} prompt.config.model - Model name passed to the provider factory.
 * @param {number} [prompt.config.temperature] - Sampling temperature.
 * @param {number} [prompt.config.max_tokens] - Output-token cap; defaults to 1024.
 * @param {Array<{role: string, content: string}>} prompt.messages - Chat messages.
 * @returns {Promise<string>} The generated text.
 * @throws {Error} When prompt.config.provider is not a registered provider.
 */
export async function generateText( prompt ) {
  const provider = providers[prompt.config.provider];

  // Fail fast with a clear message instead of the opaque
  // "provider is not a function" TypeError an unknown key would cause.
  if ( !provider ) {
    throw new Error( `Unknown LLM provider: ${prompt.config.provider}` );
  }

  const model = provider( prompt.config.model );

  const result = await aiGenerateText( {
    model,
    messages: prompt.messages,
    temperature: prompt.config.temperature,
    // ai v5 renamed `maxTokens` to `maxOutputTokens`; the old key was
    // silently ignored, so the 1024 default cap never applied. The
    // sibling generateObject() already uses the v5 name.
    maxOutputTokens: prompt.config.max_tokens ?? 1024
  } );

  trace( { lib: 'llm', event: 'generateText', input: prompt, output: result } );

  return result.text;
}
|
|
29
|
+
|
|
30
|
+
/**
 * Generates a structured value that conforms to the supplied schema.
 *
 * @param {object} prompt - Prompt descriptor (same shape as generateText's).
 * @param {object} llmSchema - Schema descriptor; its `schema`, `name`,
 *   `description` and `output` fields are forwarded to ai-sdk's generateObject.
 * @returns {Promise<unknown>} The parsed object produced by the model.
 */
export async function generateObject( prompt, llmSchema ) {
  const { config, messages } = prompt;

  // Resolve the provider factory, then build the concrete model handle.
  const createModel = providers[config.provider];
  const model = createModel( config.model );

  const generation = await aiGenerateObject( {
    model,
    schema: llmSchema.schema,
    schemaName: llmSchema.name,
    schemaDescription: llmSchema.description,
    output: llmSchema.output,
    messages,
    temperature: config.temperature,
    maxOutputTokens: config.max_tokens ?? 1024
  } );

  trace( { lib: 'llm', event: 'generateObject', input: prompt, output: generation } );

  return generation.object;
}
|
|
49
|
+
|
package/src/index.d.ts
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
/**
 * Chat-style prompt accepted by {@link generateText} and {@link generateObject}.
 */
export interface Prompt {
  config: {
    /** Provider key used to resolve the ai-sdk provider factory. */
    provider: 'anthropic' | 'openai';
    /** Model identifier forwarded to the provider, e.g. 'claude-3.5'. */
    model: string;
    /** Optional sampling temperature, passed through to ai-sdk. */
    temperature?: number;
    /** Optional output-token cap; the implementation defaults it to 1024. */
    max_tokens?: number;
  };
  /** Ordered chat messages sent to the model. */
  messages: Array<{
    role: string;
    content: string;
  }>;
}

/**
 * Describes the schema a generateObject() result must conform to.
 *
 * NOTE(review): `schema` is forwarded verbatim to ai-sdk's generateObject —
 * presumably a Zod or JSON schema object; confirm against src/schema.js.
 */
export class LLMSchema {
  /** Schema name forwarded as ai-sdk's `schemaName`. */
  name: string;
  /** Schema definition forwarded as ai-sdk's `schema`. */
  schema: Record<string, unknown> | object;
  /** Human-readable description forwarded as `schemaDescription`. */
  description: string;
  /** Shape of the generated value: a single object or an array of them. */
  output: 'object' | 'array';

  constructor( options: {
    name: string;
    description: string;
    schema: Record<string, unknown> | object;
    output?: 'object' | 'array';
  } );
}

/**
 * Generates a plain-text completion.
 *
 * @returns The model's generated text.
 */
export function generateText( prompt: Prompt ): Promise<string>;

/**
 * Generates a structured value matching `llmSchema`.
 *
 * @typeParam T - Expected shape of the generated object (unchecked at runtime).
 * @returns The parsed object produced by the model.
 */
export function generateObject<T = unknown>(
  prompt: Prompt,
  llmSchema: LLMSchema
): Promise<T>;
|
package/src/index.js
ADDED