@ai-sdk/openai-compatible 3.0.0-beta.7 → 3.0.0-beta.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,13 @@
1
1
  # @ai-sdk/openai-compatible
2
2
 
3
+ ## 3.0.0-beta.8
4
+
5
+ ### Patch Changes
6
+
7
+ - Updated dependencies [3887c70]
8
+ - @ai-sdk/provider-utils@5.0.0-beta.6
9
+ - @ai-sdk/provider@4.0.0-beta.4
10
+
3
11
  ## 3.0.0-beta.7
4
12
 
5
13
  ### Patch Changes
@@ -324,13 +332,13 @@
324
332
  Before
325
333
 
326
334
  ```ts
327
- model.textEmbeddingModel('my-model-id');
335
+ model.textEmbeddingModel("my-model-id");
328
336
  ```
329
337
 
330
338
  After
331
339
 
332
340
  ```ts
333
- model.embeddingModel('my-model-id');
341
+ model.embeddingModel("my-model-id");
334
342
  ```
335
343
 
336
344
  - 2625a04: feat(openai): update spec for mcp approval
@@ -545,13 +553,13 @@
545
553
  Before
546
554
 
547
555
  ```ts
548
- model.textEmbeddingModel('my-model-id');
556
+ model.textEmbeddingModel("my-model-id");
549
557
  ```
550
558
 
551
559
  After
552
560
 
553
561
  ```ts
554
- model.embeddingModel('my-model-id');
562
+ model.embeddingModel("my-model-id");
555
563
  ```
556
564
 
557
565
  - Updated dependencies [8d9e8ad]
@@ -987,7 +995,7 @@
987
995
 
988
996
  ```js
989
997
  await generateImage({
990
- model: luma.image('photon-flash-1', {
998
+ model: luma.image("photon-flash-1", {
991
999
  maxImagesPerCall: 5,
992
1000
  pollIntervalMillis: 500,
993
1001
  }),
@@ -1000,7 +1008,7 @@
1000
1008
 
1001
1009
  ```js
1002
1010
  await generateImage({
1003
- model: luma.image('photon-flash-1'),
1011
+ model: luma.image("photon-flash-1"),
1004
1012
  prompt,
1005
1013
  n: 10,
1006
1014
  maxImagesPerCall: 5,
@@ -1269,7 +1277,7 @@
1269
1277
 
1270
1278
  ```js
1271
1279
  await generateImage({
1272
- model: luma.image('photon-flash-1', {
1280
+ model: luma.image("photon-flash-1", {
1273
1281
  maxImagesPerCall: 5,
1274
1282
  pollIntervalMillis: 500,
1275
1283
  }),
@@ -1282,7 +1290,7 @@
1282
1290
 
1283
1291
  ```js
1284
1292
  await generateImage({
1285
- model: luma.image('photon-flash-1'),
1293
+ model: luma.image("photon-flash-1"),
1286
1294
  prompt,
1287
1295
  n: 10,
1288
1296
  maxImagesPerCall: 5,
package/dist/index.js CHANGED
@@ -1665,7 +1665,7 @@ function toCamelCase(str) {
1665
1665
  var import_provider_utils6 = require("@ai-sdk/provider-utils");
1666
1666
 
1667
1667
  // src/version.ts
1668
- var VERSION = true ? "3.0.0-beta.7" : "0.0.0-test";
1668
+ var VERSION = true ? "3.0.0-beta.8" : "0.0.0-test";
1669
1669
 
1670
1670
  // src/openai-compatible-provider.ts
1671
1671
  function createOpenAICompatible(options) {