@mariozechner/pi-ai 0.12.14 → 0.12.15

package/dist/index.d.ts CHANGED
@@ -6,5 +6,6 @@ export * from "./providers/openai-completions.js";
  export * from "./providers/openai-responses.js";
  export * from "./stream.js";
  export * from "./types.js";
+ export * from "./utils/overflow.js";
  export * from "./utils/typebox-helpers.js";
  //# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,kBAAkB,CAAC;AACjC,cAAc,aAAa,CAAC;AAC5B,cAAc,0BAA0B,CAAC;AACzC,cAAc,uBAAuB,CAAC;AACtC,cAAc,mCAAmC,CAAC;AAClD,cAAc,iCAAiC,CAAC;AAChD,cAAc,aAAa,CAAC;AAC5B,cAAc,YAAY,CAAC;AAC3B,cAAc,4BAA4B,CAAC","sourcesContent":["export * from \"./agent/index.js\";\nexport * from \"./models.js\";\nexport * from \"./providers/anthropic.js\";\nexport * from \"./providers/google.js\";\nexport * from \"./providers/openai-completions.js\";\nexport * from \"./providers/openai-responses.js\";\nexport * from \"./stream.js\";\nexport * from \"./types.js\";\nexport * from \"./utils/typebox-helpers.js\";\n"]}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,kBAAkB,CAAC;AACjC,cAAc,aAAa,CAAC;AAC5B,cAAc,0BAA0B,CAAC;AACzC,cAAc,uBAAuB,CAAC;AACtC,cAAc,mCAAmC,CAAC;AAClD,cAAc,iCAAiC,CAAC;AAChD,cAAc,aAAa,CAAC;AAC5B,cAAc,YAAY,CAAC;AAC3B,cAAc,qBAAqB,CAAC;AACpC,cAAc,4BAA4B,CAAC","sourcesContent":["export * from \"./agent/index.js\";\nexport * from \"./models.js\";\nexport * from \"./providers/anthropic.js\";\nexport * from \"./providers/google.js\";\nexport * from \"./providers/openai-completions.js\";\nexport * from \"./providers/openai-responses.js\";\nexport * from \"./stream.js\";\nexport * from \"./types.js\";\nexport * from \"./utils/overflow.js\";\nexport * from \"./utils/typebox-helpers.js\";\n"]}
package/dist/index.js CHANGED
@@ -6,5 +6,6 @@ export * from "./providers/openai-completions.js";
  export * from "./providers/openai-responses.js";
  export * from "./stream.js";
  export * from "./types.js";
+ export * from "./utils/overflow.js";
  export * from "./utils/typebox-helpers.js";
  //# sourceMappingURL=index.js.map
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,kBAAkB,CAAC;AACjC,cAAc,aAAa,CAAC;AAC5B,cAAc,0BAA0B,CAAC;AACzC,cAAc,uBAAuB,CAAC;AACtC,cAAc,mCAAmC,CAAC;AAClD,cAAc,iCAAiC,CAAC;AAChD,cAAc,aAAa,CAAC;AAC5B,cAAc,YAAY,CAAC;AAC3B,cAAc,4BAA4B,CAAC","sourcesContent":["export * from \"./agent/index.js\";\nexport * from \"./models.js\";\nexport * from \"./providers/anthropic.js\";\nexport * from \"./providers/google.js\";\nexport * from \"./providers/openai-completions.js\";\nexport * from \"./providers/openai-responses.js\";\nexport * from \"./stream.js\";\nexport * from \"./types.js\";\nexport * from \"./utils/typebox-helpers.js\";\n"]}
+ {"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,kBAAkB,CAAC;AACjC,cAAc,aAAa,CAAC;AAC5B,cAAc,0BAA0B,CAAC;AACzC,cAAc,uBAAuB,CAAC;AACtC,cAAc,mCAAmC,CAAC;AAClD,cAAc,iCAAiC,CAAC;AAChD,cAAc,aAAa,CAAC;AAC5B,cAAc,YAAY,CAAC;AAC3B,cAAc,qBAAqB,CAAC;AACpC,cAAc,4BAA4B,CAAC","sourcesContent":["export * from \"./agent/index.js\";\nexport * from \"./models.js\";\nexport * from \"./providers/anthropic.js\";\nexport * from \"./providers/google.js\";\nexport * from \"./providers/openai-completions.js\";\nexport * from \"./providers/openai-responses.js\";\nexport * from \"./stream.js\";\nexport * from \"./types.js\";\nexport * from \"./utils/overflow.js\";\nexport * from \"./utils/typebox-helpers.js\";\n"]}
@@ -4978,7 +4978,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "anthropic/claude-3.5-haiku": {
+ readonly "anthropic/claude-3.5-haiku-20241022": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -4995,7 +4995,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "anthropic/claude-3.5-haiku-20241022": {
+ readonly "anthropic/claude-3.5-haiku": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5233,7 +5233,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3.1-405b-instruct": {
+ readonly "meta-llama/llama-3.1-8b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5250,7 +5250,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3.1-8b-instruct": {
+ readonly "meta-llama/llama-3.1-70b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5267,7 +5267,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3.1-70b-instruct": {
+ readonly "meta-llama/llama-3.1-405b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5301,7 +5301,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4o-mini-2024-07-18": {
+ readonly "openai/gpt-4o-mini": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5318,7 +5318,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4o-mini": {
+ readonly "openai/gpt-4o-mini-2024-07-18": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5420,7 +5420,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4o-2024-05-13": {
+ readonly "openai/gpt-4o": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5437,7 +5437,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4o": {
+ readonly "openai/gpt-4o:extended": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5454,7 +5454,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4o:extended": {
+ readonly "openai/gpt-4o-2024-05-13": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5471,7 +5471,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3-70b-instruct": {
+ readonly "meta-llama/llama-3-8b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5488,7 +5488,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3-8b-instruct": {
+ readonly "meta-llama/llama-3-70b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5590,7 +5590,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-3.5-turbo-0613": {
+ readonly "openai/gpt-4-turbo-preview": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5607,7 +5607,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4-turbo-preview": {
+ readonly "openai/gpt-3.5-turbo-0613": {
  id: string;
  name: string;
  api: "openai-completions";