modelfusion 0.121.0 → 0.121.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,17 @@
1
1
  # Changelog
2
2
 
3
+ ## v0.121.2 - 2024-01-11
4
+
5
+ ### Fixed
6
+
7
+ - Ollama response schema for repeated calls with Ollama 0.1.19 completion models. Thanks [@Necmttn](https://github.com/Necmttn) for the bugfix!
8
+
9
+ ## v0.121.1 - 2024-01-10
10
+
11
+ ### Fixed
12
+
13
+ - Ollama response schema for repeated calls with Ollama 0.1.19 chat models. Thanks [@jakedetels](https://github.com/jakedetels) for the bug report!
14
+
3
15
  ## v0.121.0 - 2024-01-09
4
16
 
5
17
  ### Added
package/README.md CHANGED
@@ -583,7 +583,7 @@ modelfusion.setLogFormat("detailed-object"); // log full events
583
583
 
584
584
  ### [Examples & Tutorials](https://modelfusion.dev/tutorial)
585
585
 
586
- ### [Showcase](https://modelfusion.dev/showcase)
586
+ ### [Showcase](https://modelfusion.dev/tutorial/showcase)
587
587
 
588
588
  ### [API Reference](https://modelfusion.dev/api/modules)
589
589
 
@@ -219,7 +219,7 @@ const ollamaChatResponseSchema = zod_1.z.object({
219
219
  }),
220
220
  total_duration: zod_1.z.number(),
221
221
  load_duration: zod_1.z.number().optional(),
222
- prompt_eval_count: zod_1.z.number(),
222
+ prompt_eval_count: zod_1.z.number().optional(),
223
223
  prompt_eval_duration: zod_1.z.number().optional(),
224
224
  eval_count: zod_1.z.number(),
225
225
  eval_duration: zod_1.z.number(),
@@ -48,10 +48,10 @@ export declare class OllamaChatModel extends AbstractModel<OllamaChatModelSettin
48
48
  done: true;
49
49
  created_at: string;
50
50
  total_duration: number;
51
- prompt_eval_count: number;
52
51
  eval_count: number;
53
52
  eval_duration: number;
54
53
  load_duration?: number | undefined;
54
+ prompt_eval_count?: number | undefined;
55
55
  prompt_eval_duration?: number | undefined;
56
56
  };
57
57
  textGenerationResults: {
@@ -69,10 +69,10 @@ export declare class OllamaChatModel extends AbstractModel<OllamaChatModelSettin
69
69
  done: true;
70
70
  created_at: string;
71
71
  total_duration: number;
72
- prompt_eval_count: number;
73
72
  eval_count: number;
74
73
  eval_duration: number;
75
74
  load_duration?: number | undefined;
75
+ prompt_eval_count?: number | undefined;
76
76
  prompt_eval_duration?: number | undefined;
77
77
  };
78
78
  textGenerationResults: {
@@ -136,7 +136,7 @@ declare const ollamaChatResponseSchema: z.ZodObject<{
136
136
  }>;
137
137
  total_duration: z.ZodNumber;
138
138
  load_duration: z.ZodOptional<z.ZodNumber>;
139
- prompt_eval_count: z.ZodNumber;
139
+ prompt_eval_count: z.ZodOptional<z.ZodNumber>;
140
140
  prompt_eval_duration: z.ZodOptional<z.ZodNumber>;
141
141
  eval_count: z.ZodNumber;
142
142
  eval_duration: z.ZodNumber;
@@ -149,10 +149,10 @@ declare const ollamaChatResponseSchema: z.ZodObject<{
149
149
  done: true;
150
150
  created_at: string;
151
151
  total_duration: number;
152
- prompt_eval_count: number;
153
152
  eval_count: number;
154
153
  eval_duration: number;
155
154
  load_duration?: number | undefined;
155
+ prompt_eval_count?: number | undefined;
156
156
  prompt_eval_duration?: number | undefined;
157
157
  }, {
158
158
  model: string;
@@ -163,10 +163,10 @@ declare const ollamaChatResponseSchema: z.ZodObject<{
163
163
  done: true;
164
164
  created_at: string;
165
165
  total_duration: number;
166
- prompt_eval_count: number;
167
166
  eval_count: number;
168
167
  eval_duration: number;
169
168
  load_duration?: number | undefined;
169
+ prompt_eval_count?: number | undefined;
170
170
  prompt_eval_duration?: number | undefined;
171
171
  }>;
172
172
  export type OllamaChatResponse = z.infer<typeof ollamaChatResponseSchema>;
@@ -255,10 +255,10 @@ export declare const OllamaChatResponseFormat: {
255
255
  done: true;
256
256
  created_at: string;
257
257
  total_duration: number;
258
- prompt_eval_count: number;
259
258
  eval_count: number;
260
259
  eval_duration: number;
261
260
  load_duration?: number | undefined;
261
+ prompt_eval_count?: number | undefined;
262
262
  prompt_eval_duration?: number | undefined;
263
263
  }>;
264
264
  };
@@ -215,7 +215,7 @@ const ollamaChatResponseSchema = z.object({
215
215
  }),
216
216
  total_duration: z.number(),
217
217
  load_duration: z.number().optional(),
218
- prompt_eval_count: z.number(),
218
+ prompt_eval_count: z.number().optional(),
219
219
  prompt_eval_duration: z.number().optional(),
220
220
  eval_count: z.number(),
221
221
  eval_duration: z.number(),
@@ -214,7 +214,7 @@ const ollamaCompletionResponseSchema = zod_1.z.object({
214
214
  response: zod_1.z.string(),
215
215
  total_duration: zod_1.z.number(),
216
216
  load_duration: zod_1.z.number().optional(),
217
- prompt_eval_count: zod_1.z.number(),
217
+ prompt_eval_count: zod_1.z.number().optional(),
218
218
  prompt_eval_duration: zod_1.z.number().optional(),
219
219
  eval_count: zod_1.z.number(),
220
220
  eval_duration: zod_1.z.number(),
@@ -67,10 +67,10 @@ export declare class OllamaCompletionModel<CONTEXT_WINDOW_SIZE extends number |
67
67
  response: string;
68
68
  created_at: string;
69
69
  total_duration: number;
70
- prompt_eval_count: number;
71
70
  eval_count: number;
72
71
  eval_duration: number;
73
72
  load_duration?: number | undefined;
73
+ prompt_eval_count?: number | undefined;
74
74
  prompt_eval_duration?: number | undefined;
75
75
  context?: number[] | undefined;
76
76
  };
@@ -86,10 +86,10 @@ export declare class OllamaCompletionModel<CONTEXT_WINDOW_SIZE extends number |
86
86
  response: string;
87
87
  created_at: string;
88
88
  total_duration: number;
89
- prompt_eval_count: number;
90
89
  eval_count: number;
91
90
  eval_duration: number;
92
91
  load_duration?: number | undefined;
92
+ prompt_eval_count?: number | undefined;
93
93
  prompt_eval_duration?: number | undefined;
94
94
  context?: number[] | undefined;
95
95
  };
@@ -105,10 +105,10 @@ export declare class OllamaCompletionModel<CONTEXT_WINDOW_SIZE extends number |
105
105
  response: string;
106
106
  created_at: string;
107
107
  total_duration: number;
108
- prompt_eval_count: number;
109
108
  eval_count: number;
110
109
  eval_duration: number;
111
110
  load_duration?: number | undefined;
111
+ prompt_eval_count?: number | undefined;
112
112
  prompt_eval_duration?: number | undefined;
113
113
  context?: number[] | undefined;
114
114
  };
@@ -155,7 +155,7 @@ declare const ollamaCompletionResponseSchema: z.ZodObject<{
155
155
  response: z.ZodString;
156
156
  total_duration: z.ZodNumber;
157
157
  load_duration: z.ZodOptional<z.ZodNumber>;
158
- prompt_eval_count: z.ZodNumber;
158
+ prompt_eval_count: z.ZodOptional<z.ZodNumber>;
159
159
  prompt_eval_duration: z.ZodOptional<z.ZodNumber>;
160
160
  eval_count: z.ZodNumber;
161
161
  eval_duration: z.ZodNumber;
@@ -166,10 +166,10 @@ declare const ollamaCompletionResponseSchema: z.ZodObject<{
166
166
  response: string;
167
167
  created_at: string;
168
168
  total_duration: number;
169
- prompt_eval_count: number;
170
169
  eval_count: number;
171
170
  eval_duration: number;
172
171
  load_duration?: number | undefined;
172
+ prompt_eval_count?: number | undefined;
173
173
  prompt_eval_duration?: number | undefined;
174
174
  context?: number[] | undefined;
175
175
  }, {
@@ -178,10 +178,10 @@ declare const ollamaCompletionResponseSchema: z.ZodObject<{
178
178
  response: string;
179
179
  created_at: string;
180
180
  total_duration: number;
181
- prompt_eval_count: number;
182
181
  eval_count: number;
183
182
  eval_duration: number;
184
183
  load_duration?: number | undefined;
184
+ prompt_eval_count?: number | undefined;
185
185
  prompt_eval_duration?: number | undefined;
186
186
  context?: number[] | undefined;
187
187
  }>;
@@ -262,10 +262,10 @@ export declare const OllamaCompletionResponseFormat: {
262
262
  response: string;
263
263
  created_at: string;
264
264
  total_duration: number;
265
- prompt_eval_count: number;
266
265
  eval_count: number;
267
266
  eval_duration: number;
268
267
  load_duration?: number | undefined;
268
+ prompt_eval_count?: number | undefined;
269
269
  prompt_eval_duration?: number | undefined;
270
270
  context?: number[] | undefined;
271
271
  }>;
@@ -210,7 +210,7 @@ const ollamaCompletionResponseSchema = z.object({
210
210
  response: z.string(),
211
211
  total_duration: z.number(),
212
212
  load_duration: z.number().optional(),
213
- prompt_eval_count: z.number(),
213
+ prompt_eval_count: z.number().optional(),
214
214
  prompt_eval_duration: z.number().optional(),
215
215
  eval_count: z.number(),
216
216
  eval_duration: z.number(),
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "modelfusion",
3
3
  "description": "The TypeScript library for building AI applications.",
4
- "version": "0.121.0",
4
+ "version": "0.121.2",
5
5
  "author": "Lars Grammel",
6
6
  "license": "MIT",
7
7
  "keywords": [