@output.ai/output 0.1.0-dev.0 → 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +7 -54
- package/package.json +14 -6
package/README.md
CHANGED
|
@@ -1,59 +1,12 @@
|
|
|
1
1
|
# @output.ai/output
|
|
2
2
|
|
|
3
|
-
|
|
3
|
+
Unified package for building durable LLM applications with the Output Framework.
|
|
4
4
|
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
```bash
|
|
8
|
-
npm install @output.ai/output
|
|
9
|
-
```
|
|
10
|
-
|
|
11
|
-
This single installation provides all Output SDK packages:
|
|
12
|
-
|
|
13
|
-
- `@output.ai/core` - Core workflow and activity abstractions
|
|
14
|
-
- `@output.ai/llm` - LLM integration with AI SDK
|
|
15
|
-
- `@output.ai/http` - HTTP client utilities
|
|
16
|
-
- `@output.ai/cli` - CLI tools for project scaffolding and workflow management
|
|
17
|
-
|
|
18
|
-
## Usage
|
|
19
|
-
|
|
20
|
-
Import directly from the individual packages:
|
|
21
|
-
|
|
22
|
-
```typescript
|
|
23
|
-
// Core workflow primitives
|
|
24
|
-
import { workflow, step, evaluator } from '@output.ai/core';
|
|
25
|
-
|
|
26
|
-
// LLM integration
|
|
27
|
-
import { generateText, loadPrompt } from '@output.ai/llm';
|
|
28
|
-
|
|
29
|
-
// HTTP utilities
|
|
30
|
-
import { httpClient } from '@output.ai/http';
|
|
31
|
-
```
|
|
32
|
-
|
|
33
|
-
## CLI Commands
|
|
34
|
-
|
|
35
|
-
After installation, CLI commands are available via npx:
|
|
36
|
-
|
|
37
|
-
```bash
|
|
38
|
-
# Initialize a new project
|
|
39
|
-
npx output init my-project
|
|
40
|
-
|
|
41
|
-
# List workflows
|
|
42
|
-
npx output workflow list
|
|
43
|
-
|
|
44
|
-
# Start the worker
|
|
45
|
-
npx output-worker
|
|
46
|
-
```
|
|
5
|
+
[npm version](https://www.npmjs.com/package/@output.ai/output)
|
|
47
6
|
|
|
48
7
|
## Documentation
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
- [Core SDK](https://github.com/growthxai/output-sdk/tree/main/sdk/core#readme)
|
|
53
|
-
- [LLM SDK](https://github.com/growthxai/output-sdk/tree/main/sdk/llm#readme)
|
|
54
|
-
- [HTTP SDK](https://github.com/growthxai/output-sdk/tree/main/sdk/http#readme)
|
|
55
|
-
- [CLI](https://github.com/growthxai/output-sdk/tree/main/sdk/cli#readme)
|
|
56
|
-
|
|
57
|
-
## License
|
|
58
|
-
|
|
59
|
-
Apache-2.0
|
|
8
|
+
- [Getting Started](https://docs.output.ai/)
|
|
9
|
+
- [@output.ai/core](https://docs.output.ai/packages/core) - Workflow orchestration and worker runtime
|
|
10
|
+
- [@output.ai/llm](https://docs.output.ai/packages/llm) - LLM generation with prompt templating
|
|
11
|
+
- [@output.ai/http](https://docs.output.ai/packages/http) - HTTP client with tracing
|
|
12
|
+
- [@output.ai/cli](https://docs.output.ai/packages/cli) - CLI for creating and running workflows
|
package/package.json
CHANGED
|
@@ -1,13 +1,15 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@output.ai/output",
|
|
3
|
-
"version": "0.1.0-dev.0",
|
|
3
|
+
"version": "0.1.1",
|
|
4
4
|
"description": "The Output.ai Framework",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"dependencies": {
|
|
7
|
-
"@output.ai/core": "0.
|
|
8
|
-
"@output.ai/llm": "0.2.
|
|
9
|
-
"@output.ai/http": "0.
|
|
10
|
-
|
|
7
|
+
"@output.ai/core": "0.4.9",
|
|
8
|
+
"@output.ai/llm": "0.2.13",
|
|
9
|
+
"@output.ai/http": "0.2.0"
|
|
10
|
+
},
|
|
11
|
+
"devDependencies": {
|
|
12
|
+
"@output.ai/cli": "^0.0.1"
|
|
11
13
|
},
|
|
12
14
|
"license": "Apache-2.0",
|
|
13
15
|
"publishConfig": {
|
|
@@ -17,5 +19,11 @@
|
|
|
17
19
|
"type": "git",
|
|
18
20
|
"url": "https://github.com/growthxai/output"
|
|
19
21
|
},
|
|
20
|
-
"keywords": [
|
|
22
|
+
"keywords": [
|
|
23
|
+
"temporal",
|
|
24
|
+
"workflow",
|
|
25
|
+
"llm",
|
|
26
|
+
"ai",
|
|
27
|
+
"sdk"
|
|
28
|
+
]
|
|
21
29
|
}
|