@mariozechner/pi-ai 0.5.21 → 0.5.23

This diff compares publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
@@ -1593,7 +1593,7 @@ export declare const PROVIDERS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3.1-405b-instruct": {
+ readonly "meta-llama/llama-3.1-70b-instruct": {
  id: string;
  name: string;
  provider: string;
@@ -1609,7 +1609,7 @@ export declare const PROVIDERS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3.1-70b-instruct": {
+ readonly "meta-llama/llama-3.1-405b-instruct": {
  id: string;
  name: string;
  provider: string;
@@ -1641,7 +1641,7 @@ export declare const PROVIDERS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "mistralai/mistral-7b-instruct:free": {
+ readonly "mistralai/mistral-7b-instruct-v0.3": {
  id: string;
  name: string;
  provider: string;
@@ -1657,7 +1657,7 @@ export declare const PROVIDERS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "mistralai/mistral-7b-instruct": {
+ readonly "mistralai/mistral-7b-instruct:free": {
  id: string;
  name: string;
  provider: string;
@@ -1673,7 +1673,7 @@ export declare const PROVIDERS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "mistralai/mistral-7b-instruct-v0.3": {
+ readonly "mistralai/mistral-7b-instruct": {
  id: string;
  name: string;
  provider: string;
@@ -1721,7 +1721,7 @@ export declare const PROVIDERS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3-8b-instruct": {
+ readonly "meta-llama/llama-3-70b-instruct": {
  id: string;
  name: string;
  provider: string;
@@ -1737,7 +1737,7 @@ export declare const PROVIDERS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3-70b-instruct": {
+ readonly "meta-llama/llama-3-8b-instruct": {
  id: string;
  name: string;
  provider: string;
@@ -1849,7 +1849,7 @@ export declare const PROVIDERS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "mistralai/mistral-small": {
+ readonly "mistralai/mistral-tiny": {
  id: string;
  name: string;
  provider: string;
@@ -1865,7 +1865,7 @@ export declare const PROVIDERS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "mistralai/mistral-tiny": {
+ readonly "mistralai/mistral-small": {
  id: string;
  name: string;
  provider: string;
@@ -2468,7 +2468,7 @@ export declare const PROVIDERS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "gpt-3.5-turbo": {
+ readonly "gpt-4": {
  id: string;
  name: string;
  provider: string;
@@ -2483,7 +2483,7 @@ export declare const PROVIDERS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "gpt-4": {
+ readonly "gpt-4-0314": {
  id: string;
  name: string;
  provider: string;
@@ -2498,7 +2498,7 @@ export declare const PROVIDERS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "gpt-4-0314": {
+ readonly "gpt-3.5-turbo": {
  id: string;
  name: string;
  provider: string;
@@ -2592,7 +2592,7 @@ export declare const PROVIDERS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "claude-3-5-haiku-latest": {
+ readonly "claude-3-5-haiku-20241022": {
  id: string;
  name: string;
  provider: string;
@@ -2607,7 +2607,7 @@ export declare const PROVIDERS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "claude-3-5-haiku-20241022": {
+ readonly "claude-3-5-haiku-latest": {
  id: string;
  name: string;
  provider: string;
@@ -1595,36 +1595,36 @@ export const PROVIDERS = {
  contextWindow: 131072,
  maxTokens: 16384,
  },
- "meta-llama/llama-3.1-405b-instruct": {
- id: "meta-llama/llama-3.1-405b-instruct",
- name: "Meta: Llama 3.1 405B Instruct",
+ "meta-llama/llama-3.1-70b-instruct": {
+ id: "meta-llama/llama-3.1-70b-instruct",
+ name: "Meta: Llama 3.1 70B Instruct",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.7999999999999999,
- output: 0.7999999999999999,
+ input: 0.09999999999999999,
+ output: 0.28,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 32768,
+ contextWindow: 131072,
  maxTokens: 16384,
  },
- "meta-llama/llama-3.1-70b-instruct": {
- id: "meta-llama/llama-3.1-70b-instruct",
- name: "Meta: Llama 3.1 70B Instruct",
+ "meta-llama/llama-3.1-405b-instruct": {
+ id: "meta-llama/llama-3.1-405b-instruct",
+ name: "Meta: Llama 3.1 405B Instruct",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.09999999999999999,
- output: 0.28,
+ input: 0.7999999999999999,
+ output: 0.7999999999999999,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 131072,
+ contextWindow: 32768,
  maxTokens: 16384,
  },
  "mistralai/mistral-nemo": {
@@ -1643,41 +1643,41 @@ export const PROVIDERS = {
  contextWindow: 32000,
  maxTokens: 4096,
  },
- "mistralai/mistral-7b-instruct:free": {
- id: "mistralai/mistral-7b-instruct:free",
- name: "Mistral: Mistral 7B Instruct (free)",
+ "mistralai/mistral-7b-instruct-v0.3": {
+ id: "mistralai/mistral-7b-instruct-v0.3",
+ name: "Mistral: Mistral 7B Instruct v0.3",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0,
- output: 0,
+ input: 0.028,
+ output: 0.054,
  cacheRead: 0,
  cacheWrite: 0,
  },
  contextWindow: 32768,
  maxTokens: 16384,
  },
- "mistralai/mistral-7b-instruct": {
- id: "mistralai/mistral-7b-instruct",
- name: "Mistral: Mistral 7B Instruct",
+ "mistralai/mistral-7b-instruct:free": {
+ id: "mistralai/mistral-7b-instruct:free",
+ name: "Mistral: Mistral 7B Instruct (free)",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.028,
- output: 0.054,
+ input: 0,
+ output: 0,
  cacheRead: 0,
  cacheWrite: 0,
  },
  contextWindow: 32768,
  maxTokens: 16384,
  },
- "mistralai/mistral-7b-instruct-v0.3": {
- id: "mistralai/mistral-7b-instruct-v0.3",
- name: "Mistral: Mistral 7B Instruct v0.3",
+ "mistralai/mistral-7b-instruct": {
+ id: "mistralai/mistral-7b-instruct",
+ name: "Mistral: Mistral 7B Instruct",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
@@ -1723,32 +1723,32 @@ export const PROVIDERS = {
  contextWindow: 128000,
  maxTokens: 4096,
  },
- "meta-llama/llama-3-8b-instruct": {
- id: "meta-llama/llama-3-8b-instruct",
- name: "Meta: Llama 3 8B Instruct",
+ "meta-llama/llama-3-70b-instruct": {
+ id: "meta-llama/llama-3-70b-instruct",
+ name: "Meta: Llama 3 70B Instruct",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.03,
- output: 0.06,
+ input: 0.3,
+ output: 0.39999999999999997,
  cacheRead: 0,
  cacheWrite: 0,
  },
  contextWindow: 8192,
  maxTokens: 16384,
  },
- "meta-llama/llama-3-70b-instruct": {
- id: "meta-llama/llama-3-70b-instruct",
- name: "Meta: Llama 3 70B Instruct",
+ "meta-llama/llama-3-8b-instruct": {
+ id: "meta-llama/llama-3-8b-instruct",
+ name: "Meta: Llama 3 8B Instruct",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.3,
- output: 0.39999999999999997,
+ input: 0.03,
+ output: 0.06,
  cacheRead: 0,
  cacheWrite: 0,
  },
@@ -1851,32 +1851,32 @@ export const PROVIDERS = {
  contextWindow: 128000,
  maxTokens: 4096,
  },
- "mistralai/mistral-small": {
- id: "mistralai/mistral-small",
- name: "Mistral Small",
+ "mistralai/mistral-tiny": {
+ id: "mistralai/mistral-tiny",
+ name: "Mistral Tiny",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.19999999999999998,
- output: 0.6,
+ input: 0.25,
+ output: 0.25,
  cacheRead: 0,
  cacheWrite: 0,
  },
  contextWindow: 32768,
  maxTokens: 4096,
  },
- "mistralai/mistral-tiny": {
- id: "mistralai/mistral-tiny",
- name: "Mistral Tiny",
+ "mistralai/mistral-small": {
+ id: "mistralai/mistral-small",
+ name: "Mistral Small",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.25,
- output: 0.25,
+ input: 0.19999999999999998,
+ output: 0.6,
  cacheRead: 0,
  cacheWrite: 0,
  },
@@ -2470,21 +2470,6 @@ export const PROVIDERS = {
  contextWindow: 16385,
  maxTokens: 4096,
  },
- "gpt-3.5-turbo": {
- id: "gpt-3.5-turbo",
- name: "OpenAI: GPT-3.5 Turbo",
- provider: "openai",
- reasoning: false,
- input: ["text"],
- cost: {
- input: 0.5,
- output: 1.5,
- cacheRead: 0,
- cacheWrite: 0,
- },
- contextWindow: 16385,
- maxTokens: 4096,
- },
  "gpt-4": {
  id: "gpt-4",
  name: "OpenAI: GPT-4",
@@ -2515,6 +2500,21 @@ export const PROVIDERS = {
  contextWindow: 8191,
  maxTokens: 4096,
  },
+ "gpt-3.5-turbo": {
+ id: "gpt-3.5-turbo",
+ name: "OpenAI: GPT-3.5 Turbo",
+ provider: "openai",
+ reasoning: false,
+ input: ["text"],
+ cost: {
+ input: 0.5,
+ output: 1.5,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 16385,
+ maxTokens: 4096,
+ },
  }
  },
  anthropic: {
@@ -2594,9 +2594,9 @@ export const PROVIDERS = {
  contextWindow: 200000,
  maxTokens: 64000,
  },
- "claude-3-5-haiku-latest": {
- id: "claude-3-5-haiku-latest",
- name: "Anthropic: Claude 3.5 Haiku",
+ "claude-3-5-haiku-20241022": {
+ id: "claude-3-5-haiku-20241022",
+ name: "Anthropic: Claude 3.5 Haiku (2024-10-22)",
  provider: "anthropic",
  reasoning: false,
  input: ["text", "image"],
@@ -2609,9 +2609,9 @@ export const PROVIDERS = {
  contextWindow: 200000,
  maxTokens: 8192,
  },
- "claude-3-5-haiku-20241022": {
- id: "claude-3-5-haiku-20241022",
- name: "Anthropic: Claude 3.5 Haiku (2024-10-22)",
+ "claude-3-5-haiku-latest": {
+ id: "claude-3-5-haiku-latest",
+ name: "Anthropic: Claude 3.5 Haiku",
  provider: "anthropic",
  reasoning: false,
  input: ["text", "image"],