@supernovae-st/nika-client 0.63.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +661 -0
- package/README.md +263 -0
- package/dist/index.cjs +675 -0
- package/dist/index.d.cts +274 -0
- package/dist/index.d.ts +274 -0
- package/dist/index.js +639 -0
- package/package.json +44 -0
package/README.md
ADDED
|
@@ -0,0 +1,263 @@
|
|
|
1
|
+
# @supernovae-st/nika-client
|
|
2
|
+
|
|
3
|
+
TypeScript client for the [Nika](https://github.com/SuperNovae-studio/nika) workflow engine HTTP API (`nika serve`).
|
|
4
|
+
|
|
5
|
+
- Zero runtime dependencies (uses native `fetch`)
|
|
6
|
+
- Full TypeScript types aligned with nika serve OpenAPI 3.1 spec
|
|
7
|
+
- Namespace pattern: `nika.jobs.*`, `nika.workflows.*`
|
|
8
|
+
- 6 typed error classes with full hierarchy
|
|
9
|
+
- Automatic retry on 429/5xx with exponential backoff
|
|
10
|
+
- Client-side concurrency limiter (semaphore, default: 24)
|
|
11
|
+
- SSE streaming via `AsyncIterable` with idle timeout + auto-reconnect
|
|
12
|
+
- Binary artifact download (`Uint8Array`) + streaming (`ReadableStream`)
|
|
13
|
+
- Auto-paginating workflow listing
|
|
14
|
+
- AbortSignal support on long-running operations
|
|
15
|
+
- Webhook HMAC-SHA256 verification (async, Web Crypto API)
|
|
16
|
+
- Dual CJS/ESM build
|
|
17
|
+
- Node.js 18+
|
|
18
|
+
|
|
19
|
+
## Install
|
|
20
|
+
|
|
21
|
+
```bash
|
|
22
|
+
npm install @supernovae-st/nika-client
|
|
23
|
+
```
|
|
24
|
+
|
|
25
|
+
## Quick start
|
|
26
|
+
|
|
27
|
+
```typescript
|
|
28
|
+
import { Nika } from '@supernovae-st/nika-client';
|
|
29
|
+
|
|
30
|
+
const nika = new Nika({
|
|
31
|
+
url: 'http://localhost:3000',
|
|
32
|
+
token: process.env.NIKA_TOKEN!,
|
|
33
|
+
});
|
|
34
|
+
|
|
35
|
+
// Submit + poll until done
|
|
36
|
+
const job = await nika.jobs.run('translate.nika.yaml', {
|
|
37
|
+
file: 'ui.json',
|
|
38
|
+
locale: 'fr_FR',
|
|
39
|
+
});
|
|
40
|
+
console.log(job.status); // 'completed'
|
|
41
|
+
```
|
|
42
|
+
|
|
43
|
+
## Usage
|
|
44
|
+
|
|
45
|
+
### Run a workflow and wait for completion
|
|
46
|
+
|
|
47
|
+
```typescript
|
|
48
|
+
const job = await nika.jobs.run('pipeline.nika.yaml', { topic: 'AI' });
|
|
49
|
+
console.log(job.status); // 'completed'
|
|
50
|
+
console.log(job.exit_code); // 0
|
|
51
|
+
console.log(job.completed_at); // '2026-04-02T10:01:00Z'
|
|
52
|
+
```
|
|
53
|
+
|
|
54
|
+
### Stream events in real time (SSE)
|
|
55
|
+
|
|
56
|
+
```typescript
|
|
57
|
+
const { job_id } = await nika.jobs.submit('pipeline.nika.yaml', { topic: 'AI' });
|
|
58
|
+
|
|
59
|
+
for await (const event of nika.jobs.stream(job_id)) {
|
|
60
|
+
console.log(event.type, event.task_id ?? '', event.duration_ms ?? '');
|
|
61
|
+
// started
|
|
62
|
+
// task_start research infer
|
|
63
|
+
// task_complete research 1200
|
|
64
|
+
// completed
|
|
65
|
+
}
|
|
66
|
+
```
|
|
67
|
+
|
|
68
|
+
### Run and collect all artifacts
|
|
69
|
+
|
|
70
|
+
```typescript
|
|
71
|
+
const artifacts = await nika.jobs.runAndCollect('research.nika.yaml', {
|
|
72
|
+
topic: 'workflow engines',
|
|
73
|
+
});
|
|
74
|
+
|
|
75
|
+
console.log(artifacts['report.md']); // markdown string
|
|
76
|
+
console.log(artifacts['data.json']); // parsed JSON object
|
|
77
|
+
// binary artifacts (audio, images) are skipped
|
|
78
|
+
```
|
|
79
|
+
|
|
80
|
+
### Download binary artifacts
|
|
81
|
+
|
|
82
|
+
```typescript
|
|
83
|
+
const bytes = await nika.jobs.artifactBinary('job-id', 'audio.mp3');
|
|
84
|
+
// bytes is Uint8Array
|
|
85
|
+
```
|
|
86
|
+
|
|
87
|
+
### Stream large artifacts without loading into memory
|
|
88
|
+
|
|
89
|
+
```typescript
|
|
90
|
+
import * as fs from 'node:fs';
|
|
91
|
+
|
|
92
|
+
const stream = await nika.jobs.artifactStream('job-id', 'dataset.csv');
|
|
93
|
+
const writer = fs.createWriteStream('output.csv');
|
|
94
|
+
for await (const chunk of stream) {
|
|
95
|
+
writer.write(chunk);
|
|
96
|
+
}
|
|
97
|
+
writer.end();
|
|
98
|
+
```
|
|
99
|
+
|
|
100
|
+
### Paginate workflow listing
|
|
101
|
+
|
|
102
|
+
```typescript
|
|
103
|
+
// Auto-pagination (default) — fetches all pages transparently
|
|
104
|
+
const all = await nika.workflows.list();
|
|
105
|
+
|
|
106
|
+
// Manual pagination for large lists
|
|
107
|
+
const page1 = await nika.workflows.listPage({ limit: 50 });
|
|
108
|
+
if (page1.has_more) {
|
|
109
|
+
const last = page1.workflows[page1.workflows.length - 1].name;
|
|
110
|
+
const page2 = await nika.workflows.listPage({ limit: 50, after: last });
|
|
111
|
+
}
|
|
112
|
+
```
|
|
113
|
+
|
|
114
|
+
### Cancel a running job with AbortSignal
|
|
115
|
+
|
|
116
|
+
```typescript
|
|
117
|
+
const controller = new AbortController();
|
|
118
|
+
setTimeout(() => controller.abort(), 60_000);
|
|
119
|
+
|
|
120
|
+
const job = await nika.jobs.run('slow.nika.yaml', {}, {
|
|
121
|
+
signal: controller.signal,
|
|
122
|
+
});
|
|
123
|
+
```
|
|
124
|
+
|
|
125
|
+
### Custom fetch (logging middleware)
|
|
126
|
+
|
|
127
|
+
```typescript
|
|
128
|
+
const nika = new Nika({
|
|
129
|
+
url: 'http://localhost:3000',
|
|
130
|
+
token: process.env.NIKA_TOKEN!,
|
|
131
|
+
fetch: async (url, init) => {
|
|
132
|
+
console.log(`>> ${init?.method ?? 'GET'} ${url}`);
|
|
133
|
+
const res = await fetch(url, init);
|
|
134
|
+
console.log(`<< ${res.status}`);
|
|
135
|
+
return res;
|
|
136
|
+
},
|
|
137
|
+
});
|
|
138
|
+
```
|
|
139
|
+
|
|
140
|
+
### Webhook verification
|
|
141
|
+
|
|
142
|
+
```typescript
|
|
143
|
+
import { Nika } from '@supernovae-st/nika-client';
|
|
144
|
+
|
|
145
|
+
// Stripe-style HMAC-SHA256 verification (async — uses Web Crypto API)
|
|
146
|
+
// Works in Node.js 18+, Deno, Cloudflare Workers, and Bun.
|
|
147
|
+
const isValid = await Nika.verifyWebhook(
|
|
148
|
+
rawBody,
|
|
149
|
+
signatureHeader, // 't=1234567890,v1=abc123...'
|
|
150
|
+
webhookSecret,
|
|
151
|
+
);
|
|
152
|
+
```
|
|
153
|
+
|
|
154
|
+
## Configuration
|
|
155
|
+
|
|
156
|
+
### `new Nika(config)`
|
|
157
|
+
|
|
158
|
+
| Option | Type | Default | Description |
|
|
159
|
+
|--------|------|---------|-------------|
|
|
160
|
+
| `url` | `string` | (required) | nika serve URL (http/https) |
|
|
161
|
+
| `token` | `string` | (required) | Bearer token (`NIKA_SERVE_TOKEN`) |
|
|
162
|
+
| `timeout` | `number` | `30000` | HTTP request timeout in ms |
|
|
163
|
+
| `retries` | `number` | `2` | Retries on 429/5xx |
|
|
164
|
+
| `concurrency` | `number` | `24` | Max concurrent HTTP requests |
|
|
165
|
+
| `pollInterval` | `number` | `2000` | Initial poll interval in ms |
|
|
166
|
+
| `pollTimeout` | `number` | `300000` | Max poll duration in ms |
|
|
167
|
+
| `pollBackoff` | `number` | `1.5` | Poll backoff multiplier |
|
|
168
|
+
| `fetch` | `typeof fetch` | `globalThis.fetch` | Custom fetch function |
|
|
169
|
+
| `logger` | `NikaLogger` | (none) | Logger interface (`debug`, `info`, `warn`, `error`) |
|
|
170
|
+
|
|
171
|
+
## API reference
|
|
172
|
+
|
|
173
|
+
### Jobs — `nika.jobs.*`
|
|
174
|
+
|
|
175
|
+
| Method | Returns | Description |
|
|
176
|
+
|--------|---------|-------------|
|
|
177
|
+
| `submit(workflow, inputs?, opts?)` | `RunResponse` | Submit workflow, return `{ job_id, status }` |
|
|
178
|
+
| `status(jobId)` | `NikaJob` | Get job status |
|
|
179
|
+
| `cancel(jobId)` | `CancelResponse` | Cancel a running job |
|
|
180
|
+
| `run(workflow, inputs?, opts?)` | `NikaJob` | Submit + poll until terminal state |
|
|
181
|
+
| `stream(jobId, opts?)` | `AsyncIterable<NikaEvent>` | SSE event stream |
|
|
182
|
+
| `artifacts(jobId)` | `NikaArtifact[]` | List job artifacts |
|
|
183
|
+
| `artifact(jobId, name)` | `string` | Download artifact as text |
|
|
184
|
+
| `artifactJson<T>(jobId, name)` | `T` | Download artifact as parsed JSON |
|
|
185
|
+
| `artifactBinary(jobId, name)` | `Uint8Array` | Download artifact as raw bytes |
|
|
186
|
+
| `artifactStream(jobId, name)` | `ReadableStream<Uint8Array>` | Stream artifact (for large files) |
|
|
187
|
+
| `runAndCollect(workflow, inputs?, opts?)` | `Record<string, unknown>` | Run + collect all non-binary artifacts |
|
|
188
|
+
|
|
189
|
+
### Workflows — `nika.workflows.*`
|
|
190
|
+
|
|
191
|
+
| Method | Returns | Description |
|
|
192
|
+
|--------|---------|-------------|
|
|
193
|
+
| `list()` | `WorkflowInfo[]` | List all workflows (auto-paginates) |
|
|
194
|
+
| `listPage(opts?)` | `ListWorkflowsResponse` | List single page (manual pagination) |
|
|
195
|
+
| `reload()` | `WorkflowInfo[]` | Rescan workflows directory |
|
|
196
|
+
| `source(name)` | `string` | Get raw YAML source |
|
|
197
|
+
|
|
198
|
+
### System
|
|
199
|
+
|
|
200
|
+
| Method | Returns | Description |
|
|
201
|
+
|--------|---------|-------------|
|
|
202
|
+
| `nika.health()` | `HealthResponse` | Health check (no auth required) |
|
|
203
|
+
| `Nika.verifyWebhook(body, sig, secret)` | `boolean` | Static: verify webhook HMAC-SHA256 (async — returns a `Promise`) |
|
|
204
|
+
|
|
205
|
+
## Error classes
|
|
206
|
+
|
|
207
|
+
All SDK errors extend `NikaError`. Catch it to handle any SDK error:
|
|
208
|
+
|
|
209
|
+
```
|
|
210
|
+
NikaError (base)
|
|
211
|
+
├── NikaAPIError — HTTP errors (status, body, requestId)
|
|
212
|
+
├── NikaConnectionError — Network errors (DNS, TCP, abort)
|
|
213
|
+
├── NikaTimeoutError — Request or poll timeout
|
|
214
|
+
└── NikaJobError — Job failed (exitCode, job object)
|
|
215
|
+
└── NikaJobCancelledError — Job was cancelled
|
|
216
|
+
```
|
|
217
|
+
|
|
218
|
+
```typescript
|
|
219
|
+
import { NikaError, NikaAPIError, NikaJobError } from '@supernovae-st/nika-client';
|
|
220
|
+
|
|
221
|
+
try {
|
|
222
|
+
await nika.jobs.run('pipeline.nika.yaml');
|
|
223
|
+
} catch (err) {
|
|
224
|
+
if (err instanceof NikaJobError) {
|
|
225
|
+
console.error('Job failed:', err.job.output, 'exit:', err.exitCode);
|
|
226
|
+
} else if (err instanceof NikaAPIError) {
|
|
227
|
+
console.error('HTTP error:', err.status, err.body);
|
|
228
|
+
} else if (err instanceof NikaError) {
|
|
229
|
+
console.error('SDK error:', err.message);
|
|
230
|
+
}
|
|
231
|
+
}
|
|
232
|
+
```
|
|
233
|
+
|
|
234
|
+
## SSE event types
|
|
235
|
+
|
|
236
|
+
| Type | Fields | Terminal |
|
|
237
|
+
|------|--------|----------|
|
|
238
|
+
| `started` | `job_id` | No |
|
|
239
|
+
| `task_start` | `job_id, task_id, verb` | No |
|
|
240
|
+
| `task_complete` | `job_id, task_id, duration_ms` | No |
|
|
241
|
+
| `task_failed` | `job_id, task_id, error, duration_ms` | No |
|
|
242
|
+
| `artifact_written` | `job_id, task_id, path, size` | No |
|
|
243
|
+
| `completed` | `job_id, output?` | Yes |
|
|
244
|
+
| `failed` | `job_id, error?` | Yes |
|
|
245
|
+
| `cancelled` | `job_id` | Yes |
|
|
246
|
+
|
|
247
|
+
Terminal events close the SSE stream automatically.
|
|
248
|
+
|
|
249
|
+
The SDK auto-reconnects on stream drops (up to 3 attempts), using the `Last-Event-ID` header to resume without losing events. Configure via `StreamOptions`:
|
|
250
|
+
|
|
251
|
+
```typescript
|
|
252
|
+
for await (const event of nika.jobs.stream(jobId, {
|
|
253
|
+
maxReconnects: 5,
|
|
254
|
+
reconnectDelay: 2000,
|
|
255
|
+
idleTimeout: 120_000,
|
|
256
|
+
})) {
|
|
257
|
+
// events are guaranteed in order, even across reconnects
|
|
258
|
+
}
|
|
259
|
+
```
|
|
260
|
+
|
|
261
|
+
## License
|
|
262
|
+
|
|
263
|
+
[AGPL-3.0-or-later](LICENSE)
|