@alpic80/rivet-ai-sdk-provider 2.0.0 → 2.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,199 +1,41 @@
1
- # OpenRouter Provider for Vercel AI SDK
1
+ # Rivet Provider for Vercel AI SDK
2
2
 
3
- The [OpenRouter](https://openrouter.ai/) provider for the [Vercel AI SDK](https://sdk.vercel.ai/docs) gives access to over 300 large language models on the OpenRouter chat and completion APIs.
3
+ This is the [Rivet](https://rivet.ironcladapp.com) provider for the [Vercel AI SDK](https://sdk.vercel.ai/docs), giving you the ability to run your Rivet graphs as a supported language model.
4
4
 
5
5
  ## Setup for AI SDK v5
6
6
 
7
7
  ```bash
8
8
  # For pnpm
9
- pnpm add @openrouter/ai-sdk-provider
9
+ pnpm add @alpic80/rivet-ai-sdk-provider
10
10
 
11
11
  # For npm
12
- npm install @openrouter/ai-sdk-provider
12
+ npm install @alpic80/rivet-ai-sdk-provider
13
13
 
14
14
  # For yarn
15
- yarn add @openrouter/ai-sdk-provider
16
- ```
17
-
18
- ## (LEGACY) Setup for AI SDK v4
19
-
20
- ```bash
21
- # For pnpm
22
- pnpm add @openrouter/ai-sdk-provider@ai-sdk-v4
23
-
24
- # For npm
25
- npm install @openrouter/ai-sdk-provider@ai-sdk-v4
26
-
27
- # For yarn
28
- yarn add @openrouter/ai-sdk-provider@ai-sdk-v4
29
-
15
+ yarn add @alpic80/rivet-ai-sdk-provider
30
16
  ```
31
17
 
32
18
  ## Provider Instance
33
19
 
34
- You can import the default provider instance `openrouter` from `@openrouter/ai-sdk-provider`:
20
+ You can import the default provider instance `rivet` from `@alpic80/rivet-ai-sdk-provider`:
35
21
 
36
22
  ```ts
37
- import { openrouter } from '@openrouter/ai-sdk-provider';
23
+ import { rivet } from '@alpic80/rivet-ai-sdk-provider';
38
24
  ```
39
25
 
40
26
  ## Example
41
27
 
42
28
  ```ts
43
- import { openrouter } from '@openrouter/ai-sdk-provider';
29
+ import { rivet } from '@alpic80/rivet-ai-sdk-provider';
44
30
  import { generateText } from 'ai';
45
31
 
46
32
  const { text } = await generateText({
47
- model: openrouter('openai/gpt-4o'),
33
+ model: rivet('myGraph.rivet-project'),
48
34
  prompt: 'Write a vegetarian lasagna recipe for 4 people.',
49
35
  });
50
36
  ```
51
37
 
52
- ## Supported models
53
-
54
- This list is not a definitive list of models supported by OpenRouter, as it constantly changes as we add new models (and deprecate old ones) to our system. You can find the latest list of models supported by OpenRouter [here](https://openrouter.ai/models).
55
-
56
- You can find the latest list of tool-supported models supported by OpenRouter [here](https://openrouter.ai/models?order=newest&supported_parameters=tools). (Note: This list may contain models that are not compatible with the AI SDK.)
57
-
58
- ## Passing Extra Body to OpenRouter
59
-
60
- There are 3 ways to pass extra body to OpenRouter:
61
-
62
- 1. Via the `providerOptions.openrouter` property:
63
-
64
- ```typescript
65
- import { createOpenRouter } from '@openrouter/ai-sdk-provider';
66
- import { streamText } from 'ai';
67
-
68
- const openrouter = createOpenRouter({ apiKey: 'your-api-key' });
69
- const model = openrouter('anthropic/claude-3.7-sonnet:thinking');
70
- await streamText({
71
- model,
72
- messages: [{ role: 'user', content: 'Hello' }],
73
- providerOptions: {
74
- openrouter: {
75
- reasoning: {
76
- max_tokens: 10,
77
- },
78
- },
79
- },
80
- });
81
- ```
82
-
83
- 2. Via the `extraBody` property in the model settings:
84
-
85
- ```typescript
86
- import { createOpenRouter } from '@openrouter/ai-sdk-provider';
87
- import { streamText } from 'ai';
88
-
89
- const openrouter = createOpenRouter({ apiKey: 'your-api-key' });
90
- const model = openrouter('anthropic/claude-3.7-sonnet:thinking', {
91
- extraBody: {
92
- reasoning: {
93
- max_tokens: 10,
94
- },
95
- },
96
- });
97
- await streamText({
98
- model,
99
- messages: [{ role: 'user', content: 'Hello' }],
100
- });
101
- ```
38
+ ## Required Configuration
102
39
 
103
- 3. Via the `extraBody` property in the model factory.
104
-
105
- ```typescript
106
- import { createOpenRouter } from '@openrouter/ai-sdk-provider';
107
- import { streamText } from 'ai';
108
-
109
- const openrouter = createOpenRouter({
110
- apiKey: 'your-api-key',
111
- extraBody: {
112
- reasoning: {
113
- max_tokens: 10,
114
- },
115
- },
116
- });
117
- const model = openrouter('anthropic/claude-3.7-sonnet:thinking');
118
- await streamText({
119
- model,
120
- messages: [{ role: 'user', content: 'Hello' }],
121
- });
122
- ```
123
-
124
- ## Anthropic Prompt Caching
125
-
126
- You can include Anthropic-specific options directly in your messages when using functions like `streamText`. The OpenRouter provider will automatically convert these messages to the correct format internally.
127
-
128
- ### Basic Usage
129
-
130
- ```typescript
131
- import { createOpenRouter } from '@openrouter/ai-sdk-provider';
132
- import { streamText } from 'ai';
133
-
134
- const openrouter = createOpenRouter({ apiKey: 'your-api-key' });
135
- const model = openrouter('anthropic/<supported-caching-model>');
136
-
137
- await streamText({
138
- model,
139
- messages: [
140
- {
141
- role: 'system',
142
- content:
143
- 'You are a podcast summary assistant. You are detail-oriented and critical about the content.',
144
- },
145
- {
146
- role: 'user',
147
- content: [
148
- {
149
- type: 'text',
150
- text: 'Given the text body below:',
151
- },
152
- {
153
- type: 'text',
154
- text: `<LARGE BODY OF TEXT>`,
155
- providerOptions: {
156
- openrouter: {
157
- cacheControl: { type: 'ephemeral' },
158
- },
159
- },
160
- },
161
- {
162
- type: 'text',
163
- text: 'List the speakers?',
164
- },
165
- ],
166
- },
167
- ],
168
- });
169
- ```
170
-
171
- ## Use Cases
172
-
173
- ### Usage Accounting
174
-
175
- The provider supports [OpenRouter usage accounting](https://openrouter.ai/docs/use-cases/usage-accounting), which allows you to track token usage details directly in your API responses, without making additional API calls.
176
-
177
- ```typescript
178
- // Enable usage accounting
179
- const model = openrouter('openai/gpt-3.5-turbo', {
180
- usage: {
181
- include: true,
182
- },
183
- });
184
-
185
- // Access usage accounting data
186
- const result = await generateText({
187
- model,
188
- prompt: 'Hello, how are you today?',
189
- });
190
-
191
- // Provider-specific usage details (available in providerMetadata)
192
- if (result.providerMetadata?.openrouter?.usage) {
193
- console.log('Cost:', result.providerMetadata.openrouter.usage.cost);
194
- console.log(
195
- 'Total Tokens:',
196
- result.providerMetadata.openrouter.usage.totalTokens,
197
- );
198
- }
199
- ```
40
+ To use the Rivet AI SDK provider, you will also need to install and run the Adion custom Rivet server,
41
+ available on npm (@alpic80/rivet-cli), and configure it with support for PROJECTS_ROOT_DIR (see serve.md for details).
package/dist/index.js CHANGED
@@ -1109,7 +1109,7 @@ prepare tools input:${printObject(tools)}`);
1109
1109
  }
1110
1110
 
1111
1111
  // src/version.ts
1112
- var VERSION = true ? "2.0.0" : "0.0.0-test";
1112
+ var VERSION = true ? "2.0.2" : "0.0.0-test";
1113
1113
 
1114
1114
  // src/post-to-api.ts
1115
1115
  var getOriginalFetch = () => globalThis.fetch;
@@ -1761,7 +1761,7 @@ function createRivetEventSourceResponseHandler(id, model) {
1761
1761
  }));
1762
1762
  }
1763
1763
  function mapRivetEventToOpenAIChunk(eventData, id, model) {
1764
- var _a15, _b, _c, _d, _e, _f, _g;
1764
+ var _a15, _b, _c, _d, _e, _f, _g, _h;
1765
1765
  const eventType = eventData.type;
1766
1766
  switch (eventType) {
1767
1767
  case "partialOutput":
@@ -1782,6 +1782,24 @@ function mapRivetEventToOpenAIChunk(eventData, id, model) {
1782
1782
  ],
1783
1783
  usage: void 0
1784
1784
  };
1785
+ case "nodeFinish":
1786
+ return {
1787
+ id,
1788
+ created: Math.floor(Date.now() / 1e3),
1789
+ model,
1790
+ choices: [
1791
+ {
1792
+ delta: {
1793
+ role: "assistant",
1794
+ content: (_b = eventData.delta) != null ? _b : "",
1795
+ tool_calls: void 0
1796
+ },
1797
+ finish_reason: null,
1798
+ index: 0
1799
+ }
1800
+ ],
1801
+ usage: void 0
1802
+ };
1785
1803
  case "done":
1786
1804
  return {
1787
1805
  id,
@@ -1791,14 +1809,14 @@ function mapRivetEventToOpenAIChunk(eventData, id, model) {
1791
1809
  {
1792
1810
  delta: {
1793
1811
  role: "assistant",
1794
- content: (_d = (_c = (_b = eventData.graphOutput) == null ? void 0 : _b.response) == null ? void 0 : _c.value) != null ? _d : "",
1812
+ content: (_e = (_d = (_c = eventData.graphOutput) == null ? void 0 : _c.response) == null ? void 0 : _d.value) != null ? _e : "",
1795
1813
  tool_calls: void 0
1796
1814
  },
1797
1815
  finish_reason: "stop",
1798
1816
  index: 0
1799
1817
  }
1800
1818
  ],
1801
- usage: toOpenAIUsage((_g = (_f = (_e = eventData.graphOutput.usages) == null ? void 0 : _e.value) == null ? void 0 : _f[0]) == null ? void 0 : _g.value)
1819
+ usage: toOpenAIUsage((_h = (_g = (_f = eventData.graphOutput.usages) == null ? void 0 : _f.value) == null ? void 0 : _g[0]) == null ? void 0 : _h.value)
1802
1820
  };
1803
1821
  default:
1804
1822
  return {