@output.ai/llm 0.0.14 → 0.0.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/package.json +1 -1
  2. package/src/index.d.ts +55 -5
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@output.ai/llm",
3
- "version": "0.0.14",
3
+ "version": "0.0.15",
4
4
  "description": "Framework abstraction to interact with LLM models",
5
5
  "type": "module",
6
6
  "main": "src/index.js",
package/src/index.d.ts CHANGED
@@ -1,4 +1,5 @@
1
1
  import type * as AiTypes from 'ai';
2
+ import type { z as CoreZ } from '@output.ai/core';
2
3
  import type { Prompt } from '@output.ai/prompt';
3
4
 
4
5
  export type { Prompt };
@@ -7,24 +8,41 @@ type NativeGenerateTextArgs = Parameters<typeof AiTypes.generateText>[0];
7
8
  type NativeGenerateObjectArgs = Parameters<typeof AiTypes.generateObject>[0];
8
9
 
9
10
  /**
10
- * Simplify types into a plain object
11
+ * Simplify types into a plain object while preserving unions
12
+ * (distributes over union members instead of collapsing their keys)
11
13
  */
12
- type Simplify<T> = { [K in keyof T]: T[K] } & {};
14
+ type Simplify<T> = T extends unknown ? { [K in keyof T]: T[K] } & {} : never;
15
+
16
+ /**
17
+ * Replace keys K in T with V, preserving unions by distributing over T
18
+ */
19
+ type Replace<T, K extends PropertyKey, V> = T extends unknown
20
+ ? Omit<T, Extract<K, keyof T>> & { [P in K]: V }
21
+ : never;
13
22
 
14
23
  /**
15
24
  * Text generation arguments
16
25
  * Include all native AI SDK generateText options and a Prompt object from `@output.ai/prompt`
17
26
  */
18
27
  export type GenerateTextArgs = Simplify<
19
- Partial<Omit<NativeGenerateTextArgs, 'prompt'>> & { prompt: Prompt }
28
+ Replace<Partial<NativeGenerateTextArgs>, 'prompt', Prompt>
20
29
  >;
21
30
 
31
+ /**
32
+ * Allow schemas from @output.ai/core's zod in addition to AI SDK's accepted schema types.
33
+ */
34
+ type WithCoreZodSchema<T> = T extends unknown
35
+ ? T extends { schema?: infer S }
36
+ ? Omit<T, 'schema'> & { schema?: S | CoreZ.ZodTypeAny }
37
+ : T
38
+ : never;
39
+
22
40
  /**
23
41
  * Object generation arguments
24
42
  * Include all native AI SDK generateObject options and a Prompt object from `@output.ai/prompt`
25
43
  */
26
44
  export type GenerateObjectArgs = Simplify<
27
- Partial<Omit<NativeGenerateObjectArgs, 'prompt'>> & { prompt: Prompt }
45
+ Replace<Partial<WithCoreZodSchema<NativeGenerateObjectArgs>>, 'prompt', Prompt>
28
46
  >;
29
47
 
30
48
  /**
@@ -53,7 +71,39 @@ export function generateText(
53
71
  * The Prompt object will set `model`, `messages`, `temperature` and `max_tokens`, however all these can be overwritten by their AI SDK native argument values.
54
72
  *
55
73
  * @param {GenerateObjectArgs} args - Generation arguments
56
- * @returns {Promise<object>}
74
+ * @returns {Promise<object>} An object matching the provided schema
75
+ */
76
+ export function generateObject<A extends GenerateObjectArgs & { schema: CoreZ.ZodTypeAny }>(
77
+ args: A
78
+ ): Promise<CoreZ.infer<A['schema']>>;
79
+
80
+ /**
81
+ * Use a LLM Model to generate object
82
+ *
83
+ * This function is a wrapper over AI SDK's generateObject function.
84
+ *
85
+ * It accepts the same arguments as the original function, plus a "Prompt" object, generated using `@output.ai/prompt`.
86
+ *
87
+ * The Prompt object will set `model`, `messages`, `temperature` and `max_tokens`, however all these can be overwritten by their AI SDK native argument values.
88
+ *
89
+ * @param {GenerateObjectArgs} args - Generation arguments
90
+ * @returns {Promise<object>} An object matching the provided enum
91
+ */
92
+ export function generateObject<A extends GenerateObjectArgs & { enum: readonly unknown[]; output: 'enum' }>(
93
+ args: A
94
+ ): Promise<A['enum'][number]>;
95
+
96
+ /**
97
+ * Use a LLM Model to generate object
98
+ *
99
+ * This function is a wrapper over AI SDK's generateObject function.
100
+ *
101
+ * It accepts the same arguments as the original function, plus a "Prompt" object, generated using `@output.ai/prompt`.
102
+ *
103
+ * The Prompt object will set `model`, `messages`, `temperature` and `max_tokens`, however all these can be overwritten by their AI SDK native argument values.
104
+ *
105
+ * @param {GenerateObjectArgs} args - Generation arguments
106
+ * @returns {Promise<object>} An object without a pre-defined schema
57
107
  */
58
108
  export function generateObject(
59
109
  args: GenerateObjectArgs