@output.ai/llm 0.0.13 → 0.0.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2) hide show
  1. package/package.json +2 -1
  2. package/src/index.d.ts +106 -40
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@output.ai/llm",
3
- "version": "0.0.13",
3
+ "version": "0.0.15",
4
4
  "description": "Framework abstraction to interact with LLM models",
5
5
  "type": "module",
6
6
  "main": "src/index.js",
@@ -13,6 +13,7 @@
13
13
  "@ai-sdk/azure": "2.0.53",
14
14
  "@ai-sdk/openai": "2.0.52",
15
15
  "@output.ai/core": ">=0.0.1",
16
+ "@output.ai/prompt": ">=0.0.1",
16
17
  "ai": "5.0.48"
17
18
  },
18
19
  "license": "UNLICENSED"
package/src/index.d.ts CHANGED
@@ -1,44 +1,110 @@
1
- import type {
2
- GenerateTextOptions,
3
- GenerateObjectOptions
4
- } from 'ai';
5
-
6
- export interface Prompt {
7
- config: {
8
- provider: 'anthropic' | 'openai' | 'azure' ;
9
- model: string;
10
- temperature?: number;
11
- max_tokens?: number;
12
- };
13
- messages: Array<{
14
- role: string;
15
- content: string;
16
- }>;
17
- }
18
-
19
- // Omit prompt/messages from AI SDK options and add our Prompt type
20
- export type GenerateTextOptionsWithPrompt = Omit<
21
- GenerateTextOptions,
22
- 'prompt' | 'messages' | 'model'
23
- > & {
24
- prompt: Prompt,
25
- };
26
-
27
- export type GenerateObjectOptionsWithPrompt<SCHEMA> = Omit<
28
- GenerateObjectOptions<SCHEMA>,
29
- 'prompt' | 'messages' | 'model' | 'schema' | 'schemaName' | 'schemaDescription' | 'output'
30
- > & {
31
- prompt: Prompt,
32
- schema: Record<string, unknown> | object,
33
- schemaName?: string,
34
- schemaDescription?: string,
35
- output?: 'object' | 'array',
36
- };
1
+ import type * as AiTypes from 'ai';
2
+ import type { z as CoreZ } from '@output.ai/core';
3
+ import type { Prompt } from '@output.ai/prompt';
37
4
 
5
+ export type { Prompt };
6
+
7
+ type NativeGenerateTextArgs = Parameters<typeof AiTypes.generateText>[0];
8
+ type NativeGenerateObjectArgs = Parameters<typeof AiTypes.generateObject>[0];
9
+
10
+ /**
11
+ * Simplify types into a plain object while preserving unions
12
+ * (distributes over union members instead of collapsing their keys)
13
+ */
14
+ type Simplify<T> = T extends unknown ? { [K in keyof T]: T[K] } & {} : never;
15
+
16
+ /**
17
+ * Replace keys K in T with V, preserving unions by distributing over T
18
+ */
19
+ type Replace<T, K extends PropertyKey, V> = T extends unknown
20
+ ? Omit<T, Extract<K, keyof T>> & { [P in K]: V }
21
+ : never;
22
+
23
+ /**
24
+ * Text generation arguments
25
+ * Include all native AI SDK generateText options and a Prompt object from `@output.ai/prompt`
26
+ */
27
+ export type GenerateTextArgs = Simplify<
28
+ Replace<Partial<NativeGenerateTextArgs>, 'prompt', Prompt>
29
+ >;
30
+
31
+ /**
32
+ * Allow schemas from @output.ai/core's zod in addition to AI SDK's accepted schema types.
33
+ */
34
+ type WithCoreZodSchema<T> = T extends unknown
35
+ ? T extends { schema?: infer S }
36
+ ? Omit<T, 'schema'> & { schema?: S | CoreZ.ZodTypeAny }
37
+ : T
38
+ : never;
39
+
40
+ /**
41
+ * Object generation arguments
42
+ * Include all native AI SDK generateObject options and a Prompt object from `@output.ai/prompt`
43
+ */
44
+ export type GenerateObjectArgs = Simplify<
45
+ Replace<Partial<WithCoreZodSchema<NativeGenerateObjectArgs>>, 'prompt', Prompt>
46
+ >;
47
+
48
+ /**
49
+ * Use a LLM Model to generate text
50
+ *
51
+ * This function is a wrapper over AI SDK's generateText function.
52
+ *
53
+ * It accepts the same arguments as the original function, plus a "Prompt" object generated using `@output.ai/prompt`.
54
+ *
55
+ * The Prompt object will set `model`, `messages`, `temperature` and `max_tokens`, however all these can be overwritten by their AI SDK native argument values.
56
+ *
57
+ * @param {GenerateTextArgs} args - Generation arguments
58
+ * @returns {Promise<string>}
59
+ */
38
60
  export function generateText(
39
- options: GenerateTextOptionsWithPrompt
61
+ args: GenerateTextArgs
40
62
  ): Promise<string>;
41
63
 
42
- export function generateObject<T = unknown>(
43
- options: GenerateObjectOptionsWithPrompt<T>
44
- ): Promise<T>;
64
+ /**
65
+ * Use a LLM Model to generate object
66
+ *
67
+ * This function is a wrapper over AI SDK's generateObject function.
68
+ *
69
+ * It accepts the same arguments as the original function, plus a "Prompt" object generated using `@output.ai/prompt`.
70
+ *
71
+ * The Prompt object will set `model`, `messages`, `temperature` and `max_tokens`, however all these can be overwritten by their AI SDK native argument values.
72
+ *
73
+ * @param {GenerateObjectArgs} args - Generation arguments
74
+ * @returns {Promise<object>} An object matching the provided schema
75
+ */
76
+ export function generateObject<A extends GenerateObjectArgs & { schema: CoreZ.ZodTypeAny }>(
77
+ args: A
78
+ ): Promise<CoreZ.infer<A['schema']>>;
79
+
80
+ /**
81
+ * Use a LLM Model to generate object
82
+ *
83
+ * This function is a wrapper over AI SDK's generateObject function.
84
+ *
85
+ * It accepts the same arguments as the original function, plus a "Prompt" object generated using `@output.ai/prompt`.
86
+ *
87
+ * The Prompt object will set `model`, `messages`, `temperature` and `max_tokens`, however all these can be overwritten by their AI SDK native argument values.
88
+ *
89
+ * @param {GenerateObjectArgs} args - Generation arguments
90
+ * @returns {Promise<object>} An object matching the provided enum
91
+ */
92
+ export function generateObject<A extends GenerateObjectArgs & { enum: readonly unknown[]; output: 'enum' }>(
93
+ args: A
94
+ ): Promise<A['enum'][number]>;
95
+
96
+ /**
97
+ * Use a LLM Model to generate object
98
+ *
99
+ * This function is a wrapper over AI SDK's generateObject function.
100
+ *
101
+ * It accepts the same arguments as the original function, plus a "Prompt" object generated using `@output.ai/prompt`.
102
+ *
103
+ * The Prompt object will set `model`, `messages`, `temperature` and `max_tokens`, however all these can be overwritten by their AI SDK native argument values.
104
+ *
105
+ * @param {GenerateObjectArgs} args - Generation arguments
106
+ * @returns {Promise<object>} An object without a pre-defined schema
107
+ */
108
+ export function generateObject(
109
+ args: GenerateObjectArgs
110
+ ): Promise<object>;