@voltx/ai 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +135 -0
- package/dist/index.cjs +972 -0
- package/dist/index.d.cts +326 -0
- package/dist/index.d.ts +326 -0
- package/dist/index.js +925 -0
- package/package.json +38 -0
package/README.md
ADDED
|
@@ -0,0 +1,135 @@
|
|
|
1
|
+
<p align="center">
|
|
2
|
+
<strong>@voltx/ai</strong><br/>
|
|
3
|
+
<em>Unified LLM provider abstraction with streaming, tool calling, and structured output</em>
|
|
4
|
+
</p>
|
|
5
|
+
|
|
6
|
+
<p align="center">
|
|
7
|
+
<a href="https://www.npmjs.com/package/@voltx/ai"><img src="https://img.shields.io/npm/v/@voltx/ai?color=blue" alt="npm" /></a>
|
|
8
|
+
<a href="https://www.npmjs.com/package/@voltx/ai"><img src="https://img.shields.io/npm/dm/@voltx/ai" alt="downloads" /></a>
|
|
9
|
+
<a href="https://github.com/codewithshail/voltx/blob/main/LICENSE"><img src="https://img.shields.io/npm/l/@voltx/ai" alt="license" /></a>
|
|
10
|
+
</p>
|
|
11
|
+
|
|
12
|
+
---
|
|
13
|
+
|
|
14
|
+
One API for every LLM. Part of the [VoltX](https://github.com/codewithshail/voltx) framework.
|
|
15
|
+
|
|
16
|
+
Write your AI code once, switch providers with a single string change. Supports text generation, streaming, tool calling, structured output, and embeddings.
|
|
17
|
+
|
|
18
|
+
## Installation
|
|
19
|
+
|
|
20
|
+
```bash
|
|
21
|
+
npm install @voltx/ai
|
|
22
|
+
```
|
|
23
|
+
|
|
24
|
+
## Supported Providers
|
|
25
|
+
|
|
26
|
+
| Provider | Chat | Streaming | Tool Calling | Embeddings |
|
|
27
|
+
|----------|------|-----------|-------------|------------|
|
|
28
|
+
| OpenAI | ✅ | ✅ | ✅ | ✅ |
|
|
29
|
+
| Anthropic | ✅ | ✅ | ✅ | — |
|
|
30
|
+
| Google Gemini | ✅ | ✅ | ✅ | ✅ |
|
|
31
|
+
| Cerebras | ✅ | ✅ | ✅ | — |
|
|
32
|
+
| OpenRouter | ✅ | ✅ | ✅ | — |
|
|
33
|
+
| Ollama | ✅ | ✅ | ✅ | ✅ |
|
|
34
|
+
|
|
35
|
+
## Quick Start
|
|
36
|
+
|
|
37
|
+
### Generate Text
|
|
38
|
+
|
|
39
|
+
```ts
|
|
40
|
+
import { generateText } from "@voltx/ai";
|
|
41
|
+
|
|
42
|
+
const { text } = await generateText({
|
|
43
|
+
model: "openai:gpt-4o",
|
|
44
|
+
prompt: "Explain TypeScript in one sentence.",
|
|
45
|
+
});
|
|
46
|
+
```
|
|
47
|
+
|
|
48
|
+
### Stream Text (SSE)
|
|
49
|
+
|
|
50
|
+
```ts
|
|
51
|
+
import { streamText } from "@voltx/ai";
|
|
52
|
+
|
|
53
|
+
const result = await streamText({
|
|
54
|
+
model: "cerebras:llama-4-scout-17b-16e",
|
|
55
|
+
system: "You are a helpful assistant.",
|
|
56
|
+
messages: [{ role: "user", content: "Hello!" }],
|
|
57
|
+
});
|
|
58
|
+
|
|
59
|
+
// Use in an HTTP endpoint
|
|
60
|
+
return result.toSSEResponse();
|
|
61
|
+
```
|
|
62
|
+
|
|
63
|
+
### Structured Output
|
|
64
|
+
|
|
65
|
+
```ts
|
|
66
|
+
import { generateObject } from "@voltx/ai";
|
|
67
|
+
import { z } from "zod";
|
|
68
|
+
|
|
69
|
+
const { object } = await generateObject({
|
|
70
|
+
model: "openai:gpt-4o",
|
|
71
|
+
prompt: "Generate a recipe for pasta.",
|
|
72
|
+
schema: z.object({
|
|
73
|
+
name: z.string(),
|
|
74
|
+
ingredients: z.array(z.string()),
|
|
75
|
+
steps: z.array(z.string()),
|
|
76
|
+
}),
|
|
77
|
+
});
|
|
78
|
+
```
|
|
79
|
+
|
|
80
|
+
### Embeddings
|
|
81
|
+
|
|
82
|
+
```ts
|
|
83
|
+
import { embed, embedMany } from "@voltx/ai";
|
|
84
|
+
|
|
85
|
+
const { embedding } = await embed({
|
|
86
|
+
model: "openai:text-embedding-3-small",
|
|
87
|
+
value: "What is TypeScript?",
|
|
88
|
+
});
|
|
89
|
+
|
|
90
|
+
const { embeddings } = await embedMany({
|
|
91
|
+
model: "openai:text-embedding-3-small",
|
|
92
|
+
values: ["Hello", "World"],
|
|
93
|
+
});
|
|
94
|
+
```
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
### Provider Shorthands
|
|
98
|
+
|
|
99
|
+
```ts
|
|
100
|
+
import { generateText, openai, anthropic, cerebras, google, ollama } from "@voltx/ai";
|
|
101
|
+
|
|
102
|
+
// These are equivalent:
|
|
103
|
+
const result1 = await generateText({ model: "openai:gpt-4o", prompt: "Hi" });
|
|
104
|
+
const result2 = await generateText({ model: openai("gpt-4o"), prompt: "Hi" });
|
|
105
|
+
```
|
|
106
|
+
|
|
107
|
+
## API Reference
|
|
108
|
+
|
|
109
|
+
| Function | Description |
|
|
110
|
+
|----------|-------------|
|
|
111
|
+
| `generateText()` | Single LLM completion |
|
|
112
|
+
| `streamText()` | Streaming response with SSE helpers |
|
|
113
|
+
| `generateObject()` | Structured JSON output with Zod schema |
|
|
114
|
+
| `embed()` | Single text embedding |
|
|
115
|
+
| `embedMany()` | Batch text embeddings |
|
|
116
|
+
|
|
117
|
+
## Environment Variables
|
|
118
|
+
|
|
119
|
+
Set the API key for your provider:
|
|
120
|
+
|
|
121
|
+
```env
|
|
122
|
+
OPENAI_API_KEY=sk-...
|
|
123
|
+
ANTHROPIC_API_KEY=sk-ant-...
|
|
124
|
+
GOOGLE_AI_API_KEY=AIza...
|
|
125
|
+
CEREBRAS_API_KEY=csk-...
|
|
126
|
+
OPENROUTER_API_KEY=sk-or-...
|
|
127
|
+
```
|
|
128
|
+
|
|
129
|
+
## Part of VoltX
|
|
130
|
+
|
|
131
|
+
This package is part of the [VoltX](https://github.com/codewithshail/voltx) framework. See the [monorepo](https://github.com/codewithshail/voltx) for full documentation.
|
|
132
|
+
|
|
133
|
+
## License
|
|
134
|
+
|
|
135
|
+
[MIT](https://github.com/codewithshail/voltx/blob/main/LICENSE) — Made by the [Promptly AI Team](https://buymeacoffee.com/promptlyai)
|