@workglow/ai 0.0.106 → 0.0.107

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/README.md +11 -4
  2. package/package.json +11 -11
package/README.md CHANGED
@@ -37,7 +37,7 @@ import {
37
37
  import { Workflow, getTaskQueueRegistry, TaskInput, TaskOutput } from "@workglow/task-graph";
38
38
  import { ConcurrencyLimiter, JobQueueClient, JobQueueServer } from "@workglow/job-queue";
39
39
  import { InMemoryQueueStorage } from "@workglow/storage";
40
- import { HFT_TASKS, HF_TRANSFORMERS_ONNX, HuggingFaceTransformersProvider } from "@workglow/ai-provider";
40
+ import { HFT_TASKS, HFT_STREAM_TASKS, HFT_REACTIVE_TASKS, HF_TRANSFORMERS_ONNX, HuggingFaceTransformersProvider } from "@workglow/ai-provider";
41
41
 
42
42
  // 1. Set up a model repository
43
43
  const modelRepo = new InMemoryModelRepository();
@@ -56,7 +56,7 @@ await modelRepo.addModel({
56
56
  });
57
57
 
58
58
  // 3. Register provider (inline mode requires HFT_TASKS via constructor, creates queue automatically)
59
- await new HuggingFaceTransformersProvider(HFT_TASKS).register({ mode: "inline" });
59
+ await new HuggingFaceTransformersProvider(HFT_TASKS, HFT_STREAM_TASKS, HFT_REACTIVE_TASKS).register({ mode: "inline" });
60
60
 
61
61
  // 4. Or manually set up job queue (when queue.autoCreate: false)
62
62
  const queueName = HF_TRANSFORMERS_ONNX;
@@ -320,10 +320,17 @@ AI providers handle the actual execution of AI tasks. You need to register provi
320
320
  ### Basic Provider Registration
321
321
 
322
322
  ```typescript
323
- import { HFT_TASKS, HuggingFaceTransformersProvider } from "@workglow/ai-provider";
323
+ import {
324
+ HFT_TASKS,
325
+ HFT_STREAM_TASKS,
326
+ HFT_REACTIVE_TASKS,
327
+ HuggingFaceTransformersProvider,
328
+ } from "@workglow/ai-provider";
324
329
 
325
330
  // Registers run functions for all supported AI tasks on the current thread (inline mode requires HFT_TASKS)
326
- await new HuggingFaceTransformersProvider(HFT_TASKS).register({ mode: "inline" });
331
+ await new HuggingFaceTransformersProvider(HFT_TASKS, HFT_STREAM_TASKS, HFT_REACTIVE_TASKS).register(
332
+ { mode: "inline" }
333
+ );
327
334
  ```
328
335
 
329
336
  ### Worker-Based Provider Registration
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@workglow/ai",
3
3
  "type": "module",
4
- "version": "0.0.106",
4
+ "version": "0.0.107",
5
5
  "description": "Core AI functionality for Workglow, including task execution, model management, and AI pipeline orchestration.",
6
6
  "scripts": {
7
7
  "watch": "concurrently -c 'auto' 'bun:watch-*'",
@@ -36,11 +36,11 @@
36
36
  "access": "public"
37
37
  },
38
38
  "peerDependencies": {
39
- "@workglow/dataset": "0.0.105",
40
- "@workglow/job-queue": "0.0.105",
41
- "@workglow/storage": "0.0.105",
42
- "@workglow/task-graph": "0.0.105",
43
- "@workglow/util": "0.0.105"
39
+ "@workglow/dataset": "0.0.107",
40
+ "@workglow/job-queue": "0.0.107",
41
+ "@workglow/storage": "0.0.107",
42
+ "@workglow/task-graph": "0.0.107",
43
+ "@workglow/util": "0.0.107"
44
44
  },
45
45
  "peerDependenciesMeta": {
46
46
  "@workglow/dataset": {
@@ -60,10 +60,10 @@
60
60
  }
61
61
  },
62
62
  "devDependencies": {
63
- "@workglow/dataset": "0.0.105",
64
- "@workglow/job-queue": "0.0.105",
65
- "@workglow/storage": "0.0.105",
66
- "@workglow/task-graph": "0.0.105",
67
- "@workglow/util": "0.0.105"
63
+ "@workglow/dataset": "0.0.107",
64
+ "@workglow/job-queue": "0.0.107",
65
+ "@workglow/storage": "0.0.107",
66
+ "@workglow/task-graph": "0.0.107",
67
+ "@workglow/util": "0.0.107"
68
68
  }
69
69
  }