@output.ai/llm 0.0.8 → 0.0.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/package.json +1 -1
  2. package/src/ai_sdk.js +2 -2
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@output.ai/llm",
3
- "version": "0.0.8",
3
+ "version": "0.0.9",
4
4
  "description": "Framework abstraction to interact with LLM models",
5
5
  "type": "module",
6
6
  "main": "src/index.js",
package/src/ai_sdk.js CHANGED
@@ -19,7 +19,7 @@ export async function generateText( prompt ) {
19
19
  model,
20
20
  messages: prompt.messages,
21
21
  temperature: prompt.config.temperature,
22
- maxTokens: prompt.config.max_tokens ?? 1024
22
+ maxOutputTokens: prompt.config.max_tokens ?? 64000
23
23
  } );
24
24
 
25
25
  trace( { lib: 'llm', event: 'generateText', input: prompt, output: result } );
@@ -39,7 +39,7 @@ export async function generateObject( prompt, llmSchema ) {
39
39
  output: llmSchema.output,
40
40
  messages: prompt.messages,
41
41
  temperature: prompt.config.temperature,
42
- maxOutputTokens: prompt.config.max_tokens ?? 1024
42
+ maxOutputTokens: prompt.config.max_tokens ?? 64000
43
43
  } );
44
44
 
45
45
  trace( { lib: 'llm', event: 'generateObject', input: prompt, output: result } );