@inferencesh/sdk 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +34 -0
- package/LICENSE +22 -0
- package/README.md +216 -0
- package/dist/client.d.ts +77 -0
- package/dist/client.js +249 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.js +23 -0
- package/dist/index.mjs +1 -0
- package/dist/stream.d.ts +29 -0
- package/dist/stream.js +120 -0
- package/dist/types.d.ts +3019 -0
- package/dist/types.js +257 -0
- package/package.json +77 -0
package/CHANGELOG.md
ADDED
@@ -0,0 +1,34 @@
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+## [Unreleased]
+
+## [0.1.0] - 2024-01-XX
+
+### Added
+
+- Initial release
+- `Inference` client class for API communication
+- `run()` method for executing tasks with optional waiting
+- `cancel()` method for cancelling running tasks
+- `uploadFile()` method for file uploads (base64, data URI, Blob)
+- Real-time status updates via `onUpdate` callback
+- Automatic reconnection for streaming connections
+- Full TypeScript support with exported types
+- Task status constants (`TaskStatusCompleted`, `TaskStatusFailed`, etc.)
+
+### Features
+
+- Simple, promise-based API
+- Streaming status updates via Server-Sent Events
+- Automatic file upload handling in task inputs
+- Configurable reconnection behavior
+- Comprehensive error handling
+
+[Unreleased]: https://github.com/inference-sh/sdk-js/compare/v0.1.0...HEAD
+[0.1.0]: https://github.com/inference-sh/sdk-js/releases/tag/v0.1.0
+
package/LICENSE
ADDED
@@ -0,0 +1,22 @@
+MIT License
+
+Copyright (c) 2024 inference.sh
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
package/README.md
ADDED
@@ -0,0 +1,216 @@
+# @inferencesh/sdk
+
+[](https://www.npmjs.com/package/@inferencesh/sdk)
+[](https://www.npmjs.com/package/@inferencesh/sdk)
+[](https://opensource.org/licenses/MIT)
+[](https://www.typescriptlang.org/)
+
+Official JavaScript/TypeScript SDK for [inference.sh](https://inference.sh) — Run AI models with a simple API.
+
+## Installation
+
+```bash
+npm install @inferencesh/sdk
+# or
+yarn add @inferencesh/sdk
+# or
+pnpm add @inferencesh/sdk
+```
+
+## Quick Start
+
+```typescript
+import { Inference, TaskStatusCompleted } from '@inferencesh/sdk';
+
+const client = new Inference({
+  apiKey: 'your-api-key'
+});
+
+// Run a task and wait for the result
+const result = await client.run({
+  app: 'your-app',
+  input: {
+    prompt: 'Hello, world!'
+  }
+});
+
+console.log(result.output);
+```
+
+## Usage
+
+### Basic Usage
+
+```typescript
+import { Inference } from '@inferencesh/sdk';
+
+const client = new Inference({ apiKey: 'your-api-key' });
+
+// Wait for result (default behavior)
+const result = await client.run({
+  app: 'my-app',
+  input: { prompt: 'Generate something amazing' }
+});
+
+console.log('Output:', result.output);
+```
+
+### Fire and Forget
+
+```typescript
+// Get task info immediately without waiting
+const task = await client.run(
+  { app: 'my-app', input: { prompt: 'hello' } },
+  { wait: false }
+);
+
+console.log('Task ID:', task.id);
+console.log('Status:', task.status);
+```
+
+### Real-time Status Updates
+
+```typescript
+const result = await client.run(
+  { app: 'my-app', input: { prompt: 'hello' } },
+  {
+    onUpdate: (update) => {
+      console.log('Status:', update.status);
+      console.log('Progress:', update.logs);
+    }
+  }
+);
+```
+
+### Batch Processing
+
+```typescript
+async function processImages(images: string[]) {
+  const results = [];
+
+  for (const image of images) {
+    const result = await client.run({
+      app: 'image-processor',
+      input: { image }
+    }, {
+      onUpdate: (update) => console.log(`Processing: ${update.status}`)
+    });
+
+    results.push(result);
+  }
+
+  return results;
+}
+```
+
+### File Upload
+
+```typescript
+// Upload from base64
+const file = await client.uploadFile('data:image/png;base64,...', {
+  filename: 'image.png',
+  contentType: 'image/png'
+});
+
+// Use the uploaded file in a task
+const result = await client.run({
+  app: 'image-app',
+  input: { image: file.uri }
+});
+```
+
+### Cancel a Task
+
+```typescript
+const task = await client.run(
+  { app: 'long-running-app', input: {} },
+  { wait: false }
+);
+
+// Cancel if needed
+await client.cancel(task.id);
+```
+
+## API Reference
+
+### `new Inference(config)`
+
+Creates a new Inference client.
+
+| Parameter | Type | Required | Description |
+|-----------|------|----------|-------------|
+| `config.apiKey` | `string` | Yes | Your inference.sh API key |
+| `config.baseUrl` | `string` | No | Custom API URL (default: `https://api.inference.sh`) |
+
+### `client.run(params, options?)`
+
+Runs a task on inference.sh.
+
+**Parameters:**
+
+| Parameter | Type | Required | Description |
+|-----------|------|----------|-------------|
+| `params.app` | `string` | Yes | App identifier (e.g., `'username/app-name'`) |
+| `params.input` | `object` | Yes | Input parameters for the app |
+| `params.infra` | `string` | No | Infrastructure: `'cloud'` or `'private'` |
+| `params.variant` | `string` | No | App variant to use |
+
+**Options:**
+
+| Option | Type | Default | Description |
+|--------|------|---------|-------------|
+| `wait` | `boolean` | `true` | Wait for task completion |
+| `onUpdate` | `function` | - | Callback for status updates |
+| `autoReconnect` | `boolean` | `true` | Auto-reconnect on connection loss |
+| `maxReconnects` | `number` | `5` | Max reconnection attempts |
+| `reconnectDelayMs` | `number` | `1000` | Delay between reconnects (ms) |
+
+### `client.cancel(taskId)`
+
+Cancels a running task.
+
+### `client.uploadFile(data, options?)`
+
+Uploads a file to inference.sh.
+
+**Parameters:**
+
+| Parameter | Type | Description |
+|-----------|------|-------------|
+| `data` | `string \| Blob` | Base64 string, data URI, or Blob |
+| `options.filename` | `string` | Filename |
+| `options.contentType` | `string` | MIME type |
+| `options.public` | `boolean` | Make file publicly accessible |
+
+## Task Status Constants
+
+```typescript
+import {
+  TaskStatusQueued,
+  TaskStatusRunning,
+  TaskStatusCompleted,
+  TaskStatusFailed,
+  TaskStatusCancelled
+} from '@inferencesh/sdk';
+
+if (task.status === TaskStatusCompleted) {
+  console.log('Done!');
+}
+```
+
+## TypeScript Support
+
+This SDK is written in TypeScript and includes full type definitions. All types are exported:
+
+```typescript
+import type { Task, ApiTaskRequest, RunOptions } from '@inferencesh/sdk';
+```
+
+## Requirements
+
+- Node.js 18.0.0 or higher
+- Modern browsers with `fetch` support
+
+## License
+
+MIT © [inference.sh](https://inference.sh)
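The README above does not show error handling, but `dist/client.js` (further down in this diff) rejects the `run()` promise when a task fails or is cancelled, so wrapping `run()` in try/catch is the natural pattern. Below is a minimal sketch against the public API shown in the README; the app name, prompt, and log messages are illustrative and not part of the package.

```typescript
import { Inference } from '@inferencesh/sdk';

const client = new Inference({ apiKey: 'your-api-key' });

try {
  // Resolves with the completed task; rejects on failure or cancellation.
  const result = await client.run({ app: 'my-app', input: { prompt: 'hello' } });
  console.log('Output:', result.output);
} catch (err) {
  // Per dist/client.js, the rejection carries the task's error message,
  // "task failed", or "task cancelled".
  console.error('Task did not complete:', err);
}
```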
package/dist/client.d.ts
ADDED
@@ -0,0 +1,77 @@
+import { ApiTaskRequest, TaskDTO as Task, File } from './types';
+export interface UploadFileOptions {
+    filename?: string;
+    contentType?: string;
+    path?: string;
+    public?: boolean;
+}
+export interface InferenceConfig {
+    /** Your inference.sh API key */
+    apiKey: string;
+    /** Custom API base URL (defaults to https://api.inference.sh) */
+    baseUrl?: string;
+}
+export interface RunOptions {
+    /** Callback for real-time status updates */
+    onUpdate?: (update: Task) => void;
+    /** Wait for task completion (default: true) */
+    wait?: boolean;
+    /** Auto-reconnect on connection loss (default: true) */
+    autoReconnect?: boolean;
+    /** Maximum reconnection attempts (default: 5) */
+    maxReconnects?: number;
+    /** Delay between reconnection attempts in ms (default: 1000) */
+    reconnectDelayMs?: number;
+}
+/**
+ * Inference.sh SDK Client
+ *
+ * @example
+ * ```typescript
+ * const client = new Inference({ apiKey: 'your-api-key' });
+ * const result = await client.run({ app: 'my-app', input: { prompt: 'hello' } });
+ * ```
+ */
+export declare class Inference {
+    private readonly apiKey;
+    private readonly baseUrl;
+    constructor(config: InferenceConfig);
+    private request;
+    private createEventSource;
+    private _stripTask;
+    private processInputData;
+    /**
+     * Run a task on inference.sh
+     *
+     * @param params - Task parameters including app and input
+     * @param options - Run options for waiting, updates, and reconnection
+     * @returns The completed task result
+     *
+     * @example
+     * ```typescript
+     * // Simple usage - wait for result
+     * const result = await client.run({ app: 'my-app', input: { prompt: 'hello' } });
+     *
+     * // With status updates
+     * const result = await client.run(
+     *   { app: 'my-app', input: { prompt: 'hello' } },
+     *   { onUpdate: (update) => console.log(update.status) }
+     * );
+     *
+     * // Fire and forget
+     * const task = await client.run({ app: 'my-app', input: {} }, { wait: false });
+     * ```
+     */
+    run(params: ApiTaskRequest, options?: RunOptions): Promise<Task>;
+    uploadFile(data: string | Blob, options?: UploadFileOptions): Promise<File>;
+    /**
+     * Cancel a running task
+     *
+     * @param taskId - The ID of the task to cancel
+     */
+    cancel(taskId: string): Promise<void>;
+}
+/**
+ * @deprecated Use `Inference` instead. Will be removed in v1.0.0
+ */
+export declare const inference: typeof Inference;
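As a reading aid for the declarations above, here is a small sketch of building a `RunOptions` value. It assumes `RunOptions` and `Task` are re-exported from the package root, as the README's TypeScript Support section indicates; all other names and values are illustrative.

```typescript
import { Inference } from '@inferencesh/sdk';
import type { RunOptions, Task } from '@inferencesh/sdk';

const client = new Inference({ apiKey: 'your-api-key' });

// Mirrors the RunOptions declaration: wait, onUpdate, and reconnection tuning.
const options: RunOptions = {
  wait: true,
  onUpdate: (update: Task) => console.log(update.status, update.logs),
  autoReconnect: true,
  maxReconnects: 3,
  reconnectDelayMs: 500,
};

const result = await client.run({ app: 'my-app', input: { prompt: 'hello' } }, options);
```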
package/dist/client.js
ADDED
@@ -0,0 +1,249 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.inference = exports.Inference = void 0;
+const types_1 = require("./types");
+const stream_1 = require("./stream");
+const eventsource_1 = require("eventsource");
+/**
+ * Inference.sh SDK Client
+ *
+ * @example
+ * ```typescript
+ * const client = new Inference({ apiKey: 'your-api-key' });
+ * const result = await client.run({ app: 'my-app', input: { prompt: 'hello' } });
+ * ```
+ */
+class Inference {
+    constructor(config) {
+        if (!config.apiKey) {
+            throw new Error('API key is required');
+        }
+        this.apiKey = config.apiKey;
+        this.baseUrl = config.baseUrl || "https://api.inference.sh";
+    }
+    async request(method, endpoint, options = {}) {
+        const url = new URL(`${this.baseUrl}${endpoint}`);
+        if (options.params) {
+            Object.entries(options.params).forEach(([key, value]) => {
+                if (value !== undefined && value !== null) {
+                    url.searchParams.append(key, String(value));
+                }
+            });
+        }
+        const headers = {
+            "Content-Type": "application/json",
+            Authorization: `Bearer ${this.apiKey}`,
+        };
+        const fetchOptions = {
+            method: method.toUpperCase(),
+            headers,
+            credentials: "include",
+        };
+        if (options.data) {
+            fetchOptions.body = JSON.stringify(options.data);
+        }
+        const response = await fetch(url.toString(), fetchOptions);
+        const data = await response.json();
+        if (!data.success) {
+            throw new Error(data.error?.message || "Request failed");
+        }
+        return data.data;
+    }
+    createEventSource(endpoint) {
+        const url = new URL(`${this.baseUrl}${endpoint}`);
+        return new eventsource_1.EventSource(url.toString(), {
+            fetch: (input, init) => fetch(input, {
+                ...init,
+                headers: {
+                    ...init.headers,
+                    Authorization: `Bearer ${this.apiKey}`,
+                },
+            }),
+        });
+    }
+    _stripTask(task) {
+        // Keep required fields and add stripped ones
+        return {
+            ...task,
+            id: task.id,
+            created_at: task.created_at,
+            updated_at: task.updated_at,
+            input: task.input,
+            output: task.output,
+            logs: task.logs,
+            status: task.status,
+        };
+    }
+    async processInputData(input, path = 'root') {
+        if (!input) {
+            return input;
+        }
+        // Handle arrays
+        if (Array.isArray(input)) {
+            return Promise.all(input.map((item, idx) => this.processInputData(item, `${path}[${idx}]`)));
+        }
+        // Handle objects
+        if (typeof input === 'object') {
+            // Handle Blob
+            if (typeof Blob !== "undefined" && input instanceof Blob) {
+                const file = await this.uploadFile(input);
+                return file.uri;
+            }
+            // Recursively process object properties
+            const processed = {};
+            for (const [key, value] of Object.entries(input)) {
+                processed[key] = await this.processInputData(value, `${path}.${key}`);
+            }
+            return processed;
+        }
+        // Handle base64 strings or data URIs
+        if (typeof input === 'string' &&
+            (input.startsWith('data:') ||
+                /^([A-Za-z0-9+/]{4})*([A-Za-z0-9+/]{3}=|[A-Za-z0-9+/]{2}==)?$/.test(input))) {
+            const file = await this.uploadFile(input);
+            return file.uri;
+        }
+        return input;
+    }
+    /**
+     * Run a task on inference.sh
+     *
+     * @param params - Task parameters including app and input
+     * @param options - Run options for waiting, updates, and reconnection
+     * @returns The completed task result
+     *
+     * @example
+     * ```typescript
+     * // Simple usage - wait for result
+     * const result = await client.run({ app: 'my-app', input: { prompt: 'hello' } });
+     *
+     * // With status updates
+     * const result = await client.run(
+     *   { app: 'my-app', input: { prompt: 'hello' } },
+     *   { onUpdate: (update) => console.log(update.status) }
+     * );
+     *
+     * // Fire and forget
+     * const task = await client.run({ app: 'my-app', input: {} }, { wait: false });
+     * ```
+     */
+    async run(params, options = {}) {
+        const { onUpdate, wait = true, autoReconnect = true, maxReconnects = 5, reconnectDelayMs = 1000, } = options;
+        // Process input data and upload any files
+        const processedInput = await this.processInputData(params.input);
+        const task = await this.request("post", "/run", {
+            data: {
+                ...params,
+                input: processedInput
+            },
+        });
+        // Return immediately if not waiting
+        if (!wait) {
+            return this._stripTask(task);
+        }
+        // Wait for completion with optional updates
+        return new Promise((resolve, reject) => {
+            const streamManager = new stream_1.StreamManager({
+                createEventSource: async () => this.createEventSource(`/tasks/${task.id}/stream`),
+                autoReconnect,
+                maxReconnects,
+                reconnectDelayMs,
+                onData: (data) => {
+                    // Strip and send update if callback provided
+                    const stripped = this._stripTask(data);
+                    onUpdate?.(stripped);
+                    if (data.status === types_1.TaskStatusCompleted) {
+                        streamManager.stop();
+                        resolve(stripped);
+                    }
+                    else if (data.status === types_1.TaskStatusFailed) {
+                        streamManager.stop();
+                        reject(new Error(data.error || "task failed"));
+                    }
+                    else if (data.status === types_1.TaskStatusCancelled) {
+                        streamManager.stop();
+                        reject(new Error("task cancelled"));
+                    }
+                },
+                onError: (error) => {
+                    reject(error);
+                    streamManager.stop();
+                },
+            });
+            streamManager.connect();
+        });
+    }
+    async uploadFile(data, options = {}) {
+        // Step 1: Create the file record
+        const fileRequest = {
+            uri: '', // Empty URI as it will be set by the server
+            filename: options.filename,
+            content_type: options.contentType || (data instanceof Blob ? data.type : 'application/octet-stream'),
+            path: options.path,
+            size: data instanceof Blob ? data.size : undefined,
+        };
+        const response = await this.request("post", "/files", {
+            data: {
+                files: [fileRequest]
+            }
+        });
+        const file = response[0];
+        // Step 2: Upload the file content to the provided upload_url
+        if (!file.upload_url) {
+            throw new Error('No upload URL provided by the server');
+        }
+        let contentToUpload;
+        if (data instanceof Blob) {
+            contentToUpload = data;
+        }
+        else {
+            // If it's a base64 string, convert it to a Blob
+            if (data.startsWith('data:')) {
+                const matches = data.match(/^data:([^;]+);base64,(.+)$/);
+                if (!matches) {
+                    throw new Error('Invalid base64 data URI format');
+                }
+                const binaryStr = atob(matches[2]);
+                const bytes = new Uint8Array(binaryStr.length);
+                for (let i = 0; i < binaryStr.length; i++) {
+                    bytes[i] = binaryStr.charCodeAt(i);
+                }
+                contentToUpload = new Blob([bytes], { type: matches[1] });
+            }
+            else {
+                // Assume it's a clean base64 string
+                const binaryStr = atob(data);
+                const bytes = new Uint8Array(binaryStr.length);
+                for (let i = 0; i < binaryStr.length; i++) {
+                    bytes[i] = binaryStr.charCodeAt(i);
+                }
+                contentToUpload = new Blob([bytes], { type: options.contentType || 'application/octet-stream' });
+            }
+        }
+        // Upload to S3 using the signed URL
+        const uploadResponse = await fetch(file.upload_url, {
+            method: 'PUT',
+            body: contentToUpload,
+            headers: {
+                'Content-Type': contentToUpload.type,
+            }
+        });
+        if (!uploadResponse.ok) {
+            throw new Error(`Failed to upload file content: ${uploadResponse.statusText}`);
+        }
+        return file;
+    }
+    /**
+     * Cancel a running task
+     *
+     * @param taskId - The ID of the task to cancel
+     */
+    async cancel(taskId) {
+        return this.request("post", `/tasks/${taskId}/cancel`);
+    }
+}
+exports.Inference = Inference;
+/**
+ * @deprecated Use `Inference` instead. Will be removed in v1.0.0
+ */
+exports.inference = Inference;
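Worth noting from the implementation above: `processInputData` walks the task input and transparently uploads any `Blob` or base64/data-URI string it finds, replacing it with the uploaded file's `uri` before the task is submitted. The sketch below illustrates what that allows at the call site; the app name and input field are illustrative, not taken from the package.

```typescript
import { Inference } from '@inferencesh/sdk';

const client = new Inference({ apiKey: 'your-api-key' });

// A Blob (or a data: URI string) placed anywhere in `input` is detected by
// processInputData, uploaded via uploadFile, and swapped for the file's URI.
const photo = new Blob([/* image bytes */], { type: 'image/png' });

const result = await client.run({
  app: 'image-app',
  input: { image: photo },
});
```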
package/dist/index.d.ts
ADDED
package/dist/index.js
ADDED
@@ -0,0 +1,23 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.inference = exports.Inference = void 0;
+// Main client export
+var client_1 = require("./client");
+Object.defineProperty(exports, "Inference", { enumerable: true, get: function () { return client_1.Inference; } });
+Object.defineProperty(exports, "inference", { enumerable: true, get: function () { return client_1.inference; } });
+// Types - includes TaskStatus constants and all DTOs
+__exportStar(require("./types"), exports);
package/dist/index.mjs
ADDED
@@ -0,0 +1 @@
+export * from "./index.js";
package/dist/stream.d.ts
ADDED
@@ -0,0 +1,29 @@
+export interface StreamManagerOptions<T> {
+    createEventSource: () => Promise<EventSource | null>;
+    autoReconnect?: boolean;
+    maxReconnects?: number;
+    reconnectDelayMs?: number;
+    onError?: (error: Error) => void;
+    onStart?: () => void;
+    onStop?: () => void;
+    onData?: (data: T) => void;
+    onYield?: (data: T) => void;
+}
+export declare class StreamManager<T> {
+    private options;
+    private eventSource;
+    private reconnectTimeout;
+    private stopTimeout;
+    private initialConnectionAttempts;
+    private isConnected;
+    private isStopped;
+    constructor(options: StreamManagerOptions<T>);
+    private clearTimeouts;
+    private closeEventSource;
+    private cleanup;
+    stopAfter(delayMs: number): void;
+    clearStopTimeout(): void;
+    stop(): void;
+    private scheduleReconnect;
+    connect(): Promise<void>;
+}
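The `StreamManager` declaration above is easiest to read next to the way `dist/client.js` drives it. The condensed sketch below mirrors that wiring and is not part of the published package; the helper name and the `any`-free typing via `TaskDTO` are illustrative.

```typescript
// Mirrors dist/client.js: one StreamManager per task stream, stopped on a terminal status.
import { StreamManager } from './stream';
import { TaskStatusCompleted, TaskStatusFailed, TaskStatusCancelled, TaskDTO } from './types';

function waitForTask(createEventSource: () => Promise<EventSource | null>): Promise<TaskDTO> {
  return new Promise((resolve, reject) => {
    const manager = new StreamManager<TaskDTO>({
      createEventSource,      // opens /tasks/{id}/stream with the bearer token
      autoReconnect: true,    // retry dropped SSE connections
      maxReconnects: 5,
      reconnectDelayMs: 1000,
      onData: (task) => {
        if (task.status === TaskStatusCompleted) { manager.stop(); resolve(task); }
        else if (task.status === TaskStatusFailed) { manager.stop(); reject(new Error('task failed')); }
        else if (task.status === TaskStatusCancelled) { manager.stop(); reject(new Error('task cancelled')); }
      },
      onError: (error) => { manager.stop(); reject(error); },
    });
    manager.connect();
  });
}
```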