@hatchet-dev/typescript-sdk 0.20.3 → 1.0.0-alpha0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/clients/hatchet-client/features/cron-client.js +0 -1
- package/clients/hatchet-client/hatchet-client.d.ts +8 -7
- package/clients/hatchet-client/hatchet-client.js +9 -8
- package/clients/hatchet-client/hatchet-logger.js +1 -17
- package/clients/worker/handler.d.ts +2 -2
- package/clients/worker/worker.d.ts +4 -4
- package/clients/worker/worker.js +14 -28
- package/examples/api.js +1 -1
- package/examples/example-event-with-results.js +1 -1
- package/examples/stream-by-additional-meta.js +1 -1
- package/index.d.ts +3 -1
- package/index.js +5 -3
- package/package.json +1 -1
- package/sdk.d.ts +1 -1
- package/sdk.js +2 -2
- package/step.d.ts +75 -9
- package/step.js +65 -2
- package/util/hatchet-promise/hatchet-promise.js +1 -0
- package/v1/client/client.d.ts +81 -0
- package/v1/client/client.interface.d.ts +4 -0
- package/v1/client/client.interface.js +2 -0
- package/v1/client/client.js +134 -0
- package/v1/client/worker.d.ts +82 -0
- package/v1/client/worker.js +125 -0
- package/v1/examples/child_workflows/run.d.ts +1 -0
- package/v1/examples/child_workflows/run.js +30 -0
- package/v1/examples/child_workflows/worker.d.ts +1 -0
- package/v1/examples/child_workflows/worker.js +24 -0
- package/v1/examples/child_workflows/workflow.d.ts +19 -0
- package/v1/examples/child_workflows/workflow.js +43 -0
- package/v1/examples/client.d.ts +2 -0
- package/v1/examples/client.js +5 -0
- package/v1/examples/concurrency-rr/load.d.ts +1 -0
- package/v1/examples/concurrency-rr/load.js +54 -0
- package/v1/examples/concurrency-rr/run.d.ts +1 -0
- package/v1/examples/concurrency-rr/run.js +39 -0
- package/v1/examples/concurrency-rr/worker.d.ts +1 -0
- package/v1/examples/concurrency-rr/worker.js +24 -0
- package/v1/examples/concurrency-rr/workflow.d.ts +11 -0
- package/v1/examples/concurrency-rr/workflow.js +35 -0
- package/v1/examples/dag/run.d.ts +1 -0
- package/v1/examples/dag/run.js +24 -0
- package/v1/examples/dag/worker.d.ts +1 -0
- package/v1/examples/dag/worker.js +24 -0
- package/v1/examples/dag/workflow.d.ts +11 -0
- package/v1/examples/dag/workflow.js +35 -0
- package/v1/examples/deep/run.d.ts +1 -0
- package/v1/examples/deep/run.js +28 -0
- package/v1/examples/deep/worker.d.ts +1 -0
- package/v1/examples/deep/worker.js +25 -0
- package/v1/examples/deep/workflow.d.ts +18 -0
- package/v1/examples/deep/workflow.js +110 -0
- package/v1/examples/legacy/run.d.ts +1 -0
- package/v1/examples/legacy/run.js +25 -0
- package/v1/examples/legacy/worker.d.ts +1 -0
- package/v1/examples/legacy/worker.js +24 -0
- package/v1/examples/legacy/workflow.d.ts +2 -0
- package/v1/examples/legacy/workflow.js +36 -0
- package/v1/examples/on_event/event.d.ts +1 -0
- package/v1/examples/on_event/event.js +25 -0
- package/v1/examples/on_event/worker.d.ts +1 -0
- package/v1/examples/on_event/worker.js +24 -0
- package/v1/examples/on_event/workflow.d.ts +17 -0
- package/v1/examples/on_event/workflow.js +33 -0
- package/v1/examples/simple/cron.d.ts +1 -0
- package/v1/examples/simple/cron.js +26 -0
- package/v1/examples/simple/delay.d.ts +1 -0
- package/v1/examples/simple/delay.js +27 -0
- package/v1/examples/simple/run.d.ts +1 -0
- package/v1/examples/simple/run.js +24 -0
- package/v1/examples/simple/schedule.d.ts +1 -0
- package/v1/examples/simple/schedule.js +27 -0
- package/v1/examples/simple/worker.d.ts +1 -0
- package/v1/examples/simple/worker.js +24 -0
- package/v1/examples/simple/workflow.d.ts +5 -0
- package/v1/examples/simple/workflow.js +15 -0
- package/v1/index.d.ts +0 -0
- package/v1/index.js +1 -0
- package/v1/task.d.ts +51 -0
- package/v1/task.js +2 -0
- package/v1/workflow.d.ts +152 -0
- package/v1/workflow.js +145 -0
- package/version.d.ts +1 -1
- package/version.js +1 -1
- package/workflow.d.ts +3 -0
package/clients/hatchet-client/features/cron-client.js CHANGED
@@ -62,7 +62,6 @@ class CronClient {
         return __awaiter(this, void 0, void 0, function* () {
             var _a, _b, _c;
             const workflowId = typeof workflow === 'string' ? workflow : workflow.id;
-            console.log('workflowId', workflowId);
             // Validate cron input with zod schema
             try {
                 const parsedCron = exports.CreateCronTriggerSchema.parse(cron);
package/clients/hatchet-client/hatchet-client.d.ts CHANGED
@@ -2,10 +2,11 @@ import { EventClient } from '../event/event-client';
 import { DispatcherClient } from '../dispatcher/dispatcher-client';
 import { AdminClient } from '../admin/admin-client';
 import { CallOptions, ChannelCredentials, ClientMiddlewareCall } from 'nice-grpc';
-import { Workflow } from '../../workflow';
-import { Worker, WorkerOpts } from '../worker';
+import { Workflow as V0Workflow } from '../../workflow';
+import { V0Worker, WorkerOpts } from '../worker';
 import { AxiosRequestConfig } from 'axios';
 import { Logger } from '../../util/logger';
+import { Workflow as V1Workflow } from '../../v1/workflow';
 import { ClientConfig } from './client-config';
 import { ListenerClient } from '../listener/listener-client';
 import { Api } from '../rest/generated/Api';
@@ -17,7 +18,7 @@ export interface HatchetClientOptions {
 }
 export declare const channelFactory: (config: ClientConfig, credentials: ChannelCredentials) => import("nice-grpc").Channel;
 export declare const addTokenMiddleware: (token: string) => <Request, Response>(call: ClientMiddlewareCall<Request, Response>, options: CallOptions) => AsyncGenerator<Awaited<Response>, Awaited<Response> | undefined, undefined>;
-export declare class HatchetClient {
+export declare class InternalHatchetClient {
     config: ClientConfig;
     credentials: ChannelCredentials;
     event: EventClient;
@@ -30,8 +31,8 @@ export declare class HatchetClient {
     cron: CronClient;
     schedule: ScheduleClient;
     constructor(config?: Partial<ClientConfig>, options?: HatchetClientOptions, axiosOpts?: AxiosRequestConfig);
-    static init(config?: Partial<ClientConfig>, options?: HatchetClientOptions, axiosConfig?: AxiosRequestConfig): HatchetClient;
-    run(workflow: string | Workflow): Promise<Worker>;
-    worker(workflow: string | Workflow, opts?: Omit<WorkerOpts, 'name'> | number): Promise<Worker>;
-    webhooks(workflows: Workflow[]): import("../worker/handler").WebhookHandler;
+    static init(config?: Partial<ClientConfig>, options?: HatchetClientOptions, axiosConfig?: AxiosRequestConfig): InternalHatchetClient;
+    run(workflow: string | V0Workflow): Promise<V0Worker>;
+    worker(workflow: string | V0Workflow, opts?: Omit<WorkerOpts, 'name'> | number): Promise<V0Worker>;
+    webhooks(workflows: Array<V1Workflow<any, any> | V0Workflow>): import("../worker/handler").WebhookHandler;
 }
package/clients/hatchet-client/hatchet-client.js CHANGED
@@ -37,7 +37,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.HatchetClient = exports.addTokenMiddleware = exports.channelFactory = void 0;
+exports.InternalHatchetClient = exports.addTokenMiddleware = exports.channelFactory = void 0;
 const zod_1 = require("zod");
 const config_loader_1 = require("../../util/config-loader");
 const event_client_1 = require("../event/event-client");
@@ -46,6 +46,7 @@ const admin_client_1 = require("../admin/admin-client");
 const nice_grpc_1 = require("nice-grpc");
 const worker_1 = require("../worker");
 const hatchet_logger_1 = require("./hatchet-logger");
+const worker_2 = require("../../v1/client/worker");
 const client_config_1 = require("./client-config");
 const listener_client_1 = require("../listener/listener-client");
 const rest_1 = __importDefault(require("../rest"));
@@ -88,7 +89,7 @@ const addTokenMiddleware = (token) => function _(call, options) {
     });
 };
 exports.addTokenMiddleware = addTokenMiddleware;
-class HatchetClient {
+class InternalHatchetClient {
     constructor(config, options, axiosOpts) {
         // Initializes a new Client instance.
         // Loads config in the following order: config param > yaml file > env vars
@@ -120,13 +121,13 @@ class HatchetClient {
         this.listener = new listener_client_1.ListenerClient(this.config, (0, exports.channelFactory)(this.config, this.credentials), clientFactory, this.api);
         this.admin = new admin_client_1.AdminClient(this.config, (0, exports.channelFactory)(this.config, this.credentials), clientFactory, this.api, this.tenantId, this.listener);
         this.logger = this.config.logger('HatchetClient', this.config.log_level);
-        this.logger.
+        this.logger.debug(`Initialized HatchetClient`);
         // Feature Clients
         this.cron = new cron_client_1.CronClient(this.tenantId, this.config, this.api, this.admin);
         this.schedule = new schedule_client_1.ScheduleClient(this.tenantId, this.config, this.api, this.admin);
     }
     static init(config, options, axiosConfig) {
-        return new HatchetClient(config, options, axiosConfig);
+        return new InternalHatchetClient(config, options, axiosConfig);
     }
     // @deprecated
     run(workflow) {
@@ -150,7 +151,7 @@ class HatchetClient {
             else {
                 options = Object.assign(Object.assign({}, options), opts);
             }
-            const worker = new worker_1.Worker(this, options);
+            const worker = new worker_1.V0Worker(this, options);
             if (typeof workflow !== 'string') {
                 yield worker.registerWorkflow(workflow);
                 return worker;
@@ -159,10 +160,10 @@ class HatchetClient {
         });
     }
     webhooks(workflows) {
-        const worker = new worker_1.Worker(this, {
+        const worker = new worker_1.V0Worker(this, {
            name: 'webhook-worker',
         });
-        return worker.getHandler(workflows);
+        return worker.getHandler(workflows.map(worker_2.toV0Workflow));
     }
 }
-exports.HatchetClient = HatchetClient;
+exports.InternalHatchetClient = InternalHatchetClient;
package/clients/hatchet-client/hatchet-logger.js CHANGED
@@ -1,7 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.HatchetLogger = exports.DEFAULT_LOGGER = void 0;
-/* eslint-disable no-console */
 const logger_1 = require("../../util/logger");
 const DEFAULT_LOGGER = (context, logLevel) => new HatchetLogger(context, logLevel);
 exports.DEFAULT_LOGGER = DEFAULT_LOGGER;
@@ -21,22 +20,7 @@ class HatchetLogger {
                 second: '2-digit',
             });
             // eslint-disable-next-line no-console
-
-            let print = console.log;
-            if (level === 'ERROR') {
-                print = console.error;
-            }
-            if (level === 'WARN') {
-                print = console.warn;
-            }
-            if (level === 'INFO') {
-                print = console.info;
-            }
-            if (level === 'DEBUG') {
-                print = console.debug;
-            }
-            // eslint-disable-next-line no-console
-            print(`🪓 ${process.pid} | ${time} ${color && `\x1b[${color || ''}m`} [${level}/${this.context}] ${message}\x1b[0m`);
+            console.log(`🪓 ${process.pid} | ${time} ${color && `\x1b[${color || ''}m`} [${level}/${this.context}] ${message}\x1b[0m`);
         }
     }
     debug(message) {
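
The practical effect of the logger hunk above is that every level now goes through console.log; previously ERROR, WARN, INFO and DEBUG were routed to console.error/warn/info/debug. A minimal sketch of the consequence for code that captures SDK output; the capture helper below is illustrative and not part of the SDK:

```typescript
// Hypothetical log-capture helper: after 1.0.0-alpha0, hooking console.log is
// enough to see every HatchetLogger line, while hooks on console.error/warn/info/debug
// no longer receive them.
const captured: string[] = [];
const originalLog = console.log;

console.log = (...args: unknown[]) => {
  captured.push(args.map(String).join(' ')); // now also includes ERROR-level SDK lines
  originalLog(...args);
};
```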
package/clients/worker/handler.d.ts CHANGED
@@ -1,13 +1,13 @@
 import { IncomingMessage, ServerResponse } from 'http';
 import { Workflow } from '../../workflow';
-import { Worker } from './worker';
+import { V0Worker } from './worker';
 export interface HandlerOpts {
     secret: string;
 }
 export declare class WebhookHandler {
     private worker;
     private workflows;
-    constructor(worker: Worker, workflows: Workflow[]);
+    constructor(worker: V0Worker, workflows: Workflow[]);
     /**
      * Handles a request with a provided body, secret, and signature.
      *
package/clients/worker/worker.d.ts CHANGED
@@ -1,4 +1,4 @@
-import { HatchetClient } from '../hatchet-client';
+import { InternalHatchetClient } from '../hatchet-client';
 import { Action, ActionListener } from '../dispatcher/action-listener';
 import { StepActionEvent, StepActionEventType, GroupKeyActionEvent, GroupKeyActionEventType } from '../../protoc/dispatcher';
 import HatchetPromise from '../../util/hatchet-promise/hatchet-promise';
@@ -15,8 +15,8 @@ export interface WorkerOpts {
     maxRuns?: number;
     labels?: WorkerLabels;
 }
-export declare class Worker {
-    client: HatchetClient;
+export declare class V0Worker {
+    client: InternalHatchetClient;
     name: string;
     workerId: string | undefined;
     killing: boolean;
@@ -30,7 +30,7 @@ export declare class Worker {
     logger: Logger;
     registeredWorkflowPromises: Array<Promise<any>>;
     labels: WorkerLabels;
-    constructor(client: HatchetClient, options: {
+    constructor(client: InternalHatchetClient, options: {
         name: string;
         handleKill?: boolean;
         maxRuns?: number;
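
For downstream code the rename above is type-level only: the legacy worker keeps its fields and methods, and only its exported name (plus the type of its client field) changes. A minimal sketch, assuming V0Worker remains reachable from the package root through the existing `export * from './clients/worker'` shown in the index.d.ts hunk further down:

```typescript
import { V0Worker } from '@hatchet-dev/typescript-sdk';

// 0.20.3: function describeWorker(worker: Worker) { ... }
function describeWorker(worker: V0Worker) {
  // name, workerId and labels are declared on V0Worker in the hunk above
  return `${worker.name} (${worker.workerId ?? 'not yet registered'})`;
}
```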
package/clients/worker/worker.js CHANGED
@@ -19,14 +19,14 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.Worker = void 0;
+exports.V0Worker = void 0;
 const hatchet_error_1 = __importDefault(require("../../util/errors/hatchet-error"));
 const dispatcher_1 = require("../../protoc/dispatcher");
 const hatchet_promise_1 = __importDefault(require("../../util/hatchet-promise/hatchet-promise"));
 const workflows_1 = require("../../protoc/workflows");
 const handler_1 = require("./handler");
 const step_1 = require("../../step");
-class Worker {
+class V0Worker {
     constructor(client, options) {
         this.workflow_registry = [];
         this.futures = {};
@@ -187,6 +187,8 @@ class Worker {
             // Send the action event to the dispatcher
             const event = this.getStepActionEvent(action, dispatcher_1.StepActionEventType.STEP_EVENT_TYPE_COMPLETED, result || null);
             yield this.client.dispatcher.sendStepActionEvent(event);
+            // delete the run from the futures
+            delete this.futures[action.stepRunId];
         }
         catch (actionEventError) {
             this.logger.error(`Could not send completed action event: ${actionEventError.message || actionEventError}`);
@@ -200,11 +202,6 @@ class Worker {
             }
             this.logger.error(`Could not send action event: ${actionEventError.message || actionEventError}`);
         }
-        finally {
-            // delete the run from the futures
-            delete this.futures[action.stepRunId];
-            delete this.contexts[action.stepRunId];
-        }
     });
     const failure = (error) => __awaiter(this, void 0, void 0, function* () {
         this.logger.error(`Step run ${action.stepRunId} failed: ${error.message}`);
@@ -218,15 +215,12 @@ class Worker {
             stack: error === null || error === void 0 ? void 0 : error.stack,
         });
         yield this.client.dispatcher.sendStepActionEvent(event);
+        // delete the run from the futures
+        delete this.futures[action.stepRunId];
         }
         catch (e) {
             this.logger.error(`Could not send action event: ${e.message}`);
         }
-        finally {
-            // delete the run from the futures
-            delete this.futures[action.stepRunId];
-            delete this.contexts[action.stepRunId];
-        }
     });
     const future = new hatchet_promise_1.default((() => __awaiter(this, void 0, void 0, function* () {
         let result;
@@ -285,15 +279,12 @@ class Worker {
         this.client.dispatcher.sendGroupKeyActionEvent(event).catch((e) => {
             this.logger.error(`Could not send action event: ${e.message}`);
         });
+        // delete the run from the futures
+        delete this.futures[key];
     }
     catch (e) {
         this.logger.error(`Could not send action event: ${e.message}`);
     }
-    finally {
-        // delete the run from the futures
-        delete this.futures[key];
-        delete this.contexts[key];
-    }
 };
 const failure = (error) => {
     this.logger.error(`Step run ${key} failed: ${error.message}`);
@@ -303,15 +294,12 @@ class Worker {
         this.client.dispatcher.sendGroupKeyActionEvent(event).catch((e) => {
             this.logger.error(`Could not send action event: ${e.message}`);
         });
+        // delete the run from the futures
+        delete this.futures[key];
     }
     catch (e) {
         this.logger.error(`Could not send action event: ${e.message}`);
     }
-    finally {
-        // delete the run from the futures
-        delete this.futures[key];
-        delete this.contexts[key];
-    }
 };
 const future = new hatchet_promise_1.default(run().then(success).catch(failure));
 this.futures[key] = future;
@@ -356,13 +344,14 @@ class Worker {
     }
     handleCancelStepRun(action) {
         return __awaiter(this, void 0, void 0, function* () {
-            const { stepRunId } = action;
             try {
                 this.logger.info(`Cancelling step run ${action.stepRunId}`);
+                const { stepRunId } = action;
                 const future = this.futures[stepRunId];
                 const context = this.contexts[stepRunId];
                 if (context && context.controller) {
                     context.controller.abort('Cancelled by worker');
+                    delete this.contexts[stepRunId];
                 }
                 if (future) {
                     future.promise.catch(() => {
@@ -370,15 +359,12 @@ class Worker {
                     });
                     future.cancel('Cancelled by worker');
                     yield future.promise;
+                    delete this.futures[stepRunId];
                 }
             }
             catch (e) {
                 this.logger.error('Could not cancel step run: ', e);
             }
-            finally {
-                delete this.futures[stepRunId];
-                delete this.contexts[stepRunId];
-            }
         });
     }
     stop() {
@@ -482,7 +468,7 @@ class Worker {
         });
     }
 }
-exports.Worker = Worker;
+exports.V0Worker = V0Worker;
 function toPbWorkerLabel(in_) {
     if (!in_) {
         return {};
package/examples/api.js CHANGED
@@ -49,7 +49,7 @@ function main() {
     const { admin } = hatchet;
     yield admin.putWorkflow(opts);
     const worker = yield hatchet.worker('example-worker');
-    worker.registerAction('slack:example', (ctx) => __awaiter(this, void 0, void 0, function* () {
+    worker.v0.registerAction('slack:example', (ctx) => __awaiter(this, void 0, void 0, function* () {
         const setData = ctx.userData();
         console.log('executed step1!', setData);
         return { step1: 'step1' };
package/examples/example-event-with-results.js CHANGED
@@ -27,7 +27,7 @@ function main() {
     const ref = yield hatchet.admin.runWorkflow('simple-workflow', {
         test: 'test',
     });
-    const listener = yield hatchet.listener.stream(yield ref.getWorkflowRunId());
+    const listener = yield hatchet.v0.listener.stream(yield ref.getWorkflowRunId());
     console.log('listening for events');
     try {
         for (var _d = true, listener_1 = __asyncValues(listener), listener_1_1; listener_1_1 = yield listener_1.next(), _a = listener_1_1.done, !_a; _d = true) {
package/examples/stream-by-additional-meta.js CHANGED
@@ -34,7 +34,7 @@ function main() {
     // and can have an arbitrary property name.
     yield hatchet.admin.runWorkflow('parent-workflow', {}, { additionalMetadata: { [streamKey]: streamVal } });
     // Stream all events for the additional meta key value
-    const stream = yield hatchet.listener.streamByAdditionalMeta(streamKey, streamVal);
+    const stream = yield hatchet.v0.listener.streamByAdditionalMeta(streamKey, streamVal);
     try {
         for (var _d = true, stream_1 = __asyncValues(stream), stream_1_1; stream_1_1 = yield stream_1.next(), _a = stream_1_1.done, !_a; _d = true) {
             _c = stream_1_1.value;
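
The example diffs above show the user-facing side of the split: the legacy listener and action-registration APIs are now reached through a `v0` property on the client and worker. A migration sketch based only on those examples; the client instance is assumed to be constructed elsewhere, as the shipped examples do via their shared client module:

```typescript
import { Hatchet } from '@hatchet-dev/typescript-sdk';

// Sketch only: `hatchet` is a client instance built the same way the package
// examples build theirs; the workflow name is taken from the example above.
async function streamRunEvents(hatchet: Hatchet) {
  const ref = await hatchet.admin.runWorkflow('simple-workflow', { test: 'test' });

  // 0.20.3: const listener = await hatchet.listener.stream(await ref.getWorkflowRunId());
  const listener = await hatchet.v0.listener.stream(await ref.getWorkflowRunId());

  for await (const event of listener) {
    console.log('event received', event);
  }
}
```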
package/index.d.ts CHANGED
@@ -1,9 +1,11 @@
-import { HatchetClient as Hatchet } from './clients/hatchet-client';
+import { HatchetClient as Hatchet } from './v1/client/client';
 export * from './workflow';
 export * from './step';
 export * from './clients/worker';
 export * from './clients/rest';
 export * from './clients/admin';
 export * from './util/workflow-run-ref';
+export * from './v1/client/client';
+export * from './v1/client/worker';
 export default Hatchet;
 export { Hatchet };
package/index.js CHANGED
@@ -15,12 +15,14 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.Hatchet = void 0;
-const hatchet_client_1 = require("./clients/hatchet-client");
-Object.defineProperty(exports, "Hatchet", { enumerable: true, get: function () { return hatchet_client_1.HatchetClient; } });
+const client_1 = require("./v1/client/client");
+Object.defineProperty(exports, "Hatchet", { enumerable: true, get: function () { return client_1.HatchetClient; } });
 __exportStar(require("./workflow"), exports);
 __exportStar(require("./step"), exports);
 __exportStar(require("./clients/worker"), exports);
 __exportStar(require("./clients/rest"), exports);
 __exportStar(require("./clients/admin"), exports);
 __exportStar(require("./util/workflow-run-ref"), exports);
-exports.default = hatchet_client_1.HatchetClient;
+__exportStar(require("./v1/client/client"), exports);
+__exportStar(require("./v1/client/worker"), exports);
+exports.default = client_1.HatchetClient;
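
Taken together, the two entry-point hunks change what the package hands out by default: the default export and the named Hatchet export now resolve to the v1 HatchetClient from ./v1/client/client, the gRPC-level client stays under ./clients/hatchet-client as InternalHatchetClient, and the root additionally re-exports ./v1/client/client and ./v1/client/worker. A small sketch of the import shape only; the v1 client's own API is defined in files not shown in these hunks:

```typescript
import Hatchet, { Hatchet as NamedHatchet } from '@hatchet-dev/typescript-sdk';

// Both bindings now point at the same v1 class.
console.log(Hatchet === NamedHatchet); // true
```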
package/package.json CHANGED
package/sdk.d.ts CHANGED
@@ -1,2 +1,2 @@
-import { HatchetClient as Hatchet } from './clients/hatchet-client';
+import { HatchetClient as Hatchet } from './v1/client/client';
 export default Hatchet;
package/sdk.js CHANGED
@@ -1,4 +1,4 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-const hatchet_client_1 = require("./clients/hatchet-client");
-exports.default = hatchet_client_1.HatchetClient;
+const client_1 = require("./v1/client/client");
+exports.default = client_1.HatchetClient;
package/step.d.ts CHANGED
@@ -3,11 +3,13 @@ import { Workflow } from './workflow';
 import { Action } from './clients/dispatcher/action-listener';
 import { LogLevel } from './clients/event/event-client';
 import { Logger } from './util/logger';
-import { HatchetClient } from './clients/hatchet-client';
+import { InternalHatchetClient } from './clients/hatchet-client';
 import WorkflowRunRef from './util/workflow-run-ref';
-import { Worker } from './clients/worker';
+import { V0Worker } from './clients/worker';
 import { WorkerLabels } from './clients/dispatcher/dispatcher-client';
 import { CreateStepRateLimit, RateLimitDuration, WorkerLabelComparator } from './protoc/workflows';
+import { CreateTaskOpts } from './v1/task';
+import { Workflow as WorkflowV1 } from './v1/workflow';
 export declare const CreateRateLimitSchema: z.ZodObject<{
     key: z.ZodOptional<z.ZodString>;
     staticKey: z.ZodOptional<z.ZodString>;
@@ -166,7 +168,7 @@ interface ContextData<T, K> {
 }
 export declare class ContextWorker {
     private worker;
-    constructor(worker: Worker);
+    constructor(worker: V0Worker);
     id(): string | undefined;
     hasWorkflow(workflowName: string): boolean;
     labels(): WorkerLabels;
@@ -177,13 +179,14 @@ export declare class Context<T, K = {}> {
     input: T;
     controller: AbortController;
     action: Action;
-    client: HatchetClient;
+    client: InternalHatchetClient;
     worker: ContextWorker;
     overridesData: Record<string, any>;
     logger: Logger;
     spawnIndex: number;
-    constructor(action: Action, client: HatchetClient, worker: Worker);
-
+    constructor(action: Action, client: InternalHatchetClient, worker: V0Worker);
+    parentData<L = NextStep>(task: CreateTaskOpts<any, L> | string): Promise<L>;
+    stepOutput<L = NextStep>(step: string): L;
     stepRunErrors(): Record<string, string>;
     triggeredByEvent(): boolean;
     workflowInput(): T;
@@ -203,14 +206,43 @@ export declare class Context<T, K = {}> {
     refreshTimeout(incrementBy: string): Promise<void>;
     releaseSlot(): Promise<void>;
     putStream(data: string | Uint8Array): Promise<void>;
+    /**
+     * Enqueues multiple children workflows in parallel.
+     * @param children an array of objects containing the workflow name, input data, and options for each workflow
+     * @returns a list of workflow run references to the enqueued runs
+     */
+    bulkEnqueueChildren<Q extends Record<string, any> = any, P extends Record<string, any> = any>(children: Array<{
+        workflow: string | Workflow | WorkflowV1<Q, P>;
+        input: Q;
+        options?: {
+            key?: string;
+            sticky?: boolean;
+            additionalMetadata?: Record<string, string>;
+        };
+    }>): Promise<WorkflowRunRef<P>[]>;
+    /**
+     * Runs multiple children workflows in parallel.
+     * @param children an array of objects containing the workflow name, input data, and options for each workflow
+     * @returns a list of results from the children workflows
+     */
+    bulkRunChildren<Q extends Record<string, any> = any, P extends Record<string, any> = any>(children: Array<{
+        workflow: string | Workflow | WorkflowV1<Q, P>;
+        input: Q;
+        options?: {
+            key?: string;
+            sticky?: boolean;
+            additionalMetadata?: Record<string, string>;
+        };
+    }>): Promise<P[]>;
     /**
      * Spawns multiple workflows.
      *
      * @param workflows an array of objects containing the workflow name, input data, and options for each workflow
     * @returns a list of references to the spawned workflow runs
+     * @deprecated use bulkEnqueueChildren or bulkRunChildren instead
      */
-    spawnWorkflows<Q =
-        workflow: string | Workflow;
+    spawnWorkflows<Q extends Record<string, any> = any, P extends Record<string, any> = any>(workflows: Array<{
+        workflow: string | Workflow | WorkflowV1<Q, P>;
         input: Q;
         options?: {
             key?: string;
@@ -218,6 +250,39 @@ export declare class Context<T, K = {}> {
             additionalMetadata?: Record<string, string>;
         };
     }>): Promise<WorkflowRunRef<P>[]>;
+    /**
+     * Runs a new workflow.
+     *
+     * @param workflow the workflow to run
+     * @param input the input data for the workflow
+     * @param options additional options for spawning the workflow. If a string is provided, it is used as the key.
+     * @param <Q> the type of the input data
+     * @param <P> the type of the output data
+     * @return the result of the workflow
+     */
+    runChild<Q extends Record<string, any>, P extends Record<string, any>>(workflow: string | Workflow | WorkflowV1<Q, P>, input: Q, options?: string | {
+        key?: string;
+        sticky?: boolean;
+        additionalMetadata?: Record<string, string>;
+    }): Promise<P>;
+    /**
+     * Enqueues a new workflow.
+     *
+     * @param workflowName the name of the workflow to spawn
+     * @param input the input data for the workflow
+     * @param options additional options for spawning the workflow. If a string is provided, it is used as the key.
+     * If an object is provided, it can include:
+     * - key: a unique identifier for the workflow (deprecated, use options.key instead)
+     * - sticky: a boolean indicating whether to use sticky execution
+     * @param <Q> the type of the input data
+     * @param <P> the type of the output data
+     * @return a reference to the spawned workflow run
+     */
+    enqueueChild<Q extends Record<string, any>, P extends Record<string, any>>(workflow: string | Workflow | WorkflowV1<Q, P>, input: Q, options?: string | {
+        key?: string;
+        sticky?: boolean;
+        additionalMetadata?: Record<string, string>;
+    }): WorkflowRunRef<P>;
     /**
      * Spawns a new workflow.
      *
@@ -230,8 +295,9 @@ export declare class Context<T, K = {}> {
      * @param <Q> the type of the input data
      * @param <P> the type of the output data
      * @return a reference to the spawned workflow run
+     * @deprecated use runChild or enqueueChild instead
      */
-    spawnWorkflow<Q
+    spawnWorkflow<Q extends Record<string, any>, P extends Record<string, any>>(workflow: string | Workflow | WorkflowV1<Q, P>, input: Q, options?: string | {
         key?: string;
         sticky?: boolean;
         additionalMetadata?: Record<string, string>;
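
The new Context members declared above give child-workflow handling an explicit enqueue/run split and deprecate spawnWorkflow/spawnWorkflows. A sketch using only the signatures shown in this hunk; the workflow name and payload types are illustrative:

```typescript
import { Context } from '@hatchet-dev/typescript-sdk';

type ChildInput = { userId: string };
type ChildOutput = { processed: boolean };

async function fanOut(ctx: Context<{ userIds: string[] }>) {
  // Enqueue without waiting: returns a WorkflowRunRef<ChildOutput> synchronously.
  const ref = ctx.enqueueChild<ChildInput, ChildOutput>('child-workflow', { userId: 'u1' });

  // Enqueue and await the result in one call.
  const one = await ctx.runChild<ChildInput, ChildOutput>('child-workflow', { userId: 'u2' });

  // Fan out several children and wait for all results (replaces spawnWorkflows).
  const many = await ctx.bulkRunChildren<ChildInput, ChildOutput>([
    { workflow: 'child-workflow', input: { userId: 'u3' } },
    { workflow: 'child-workflow', input: { userId: 'u4' }, options: { additionalMetadata: { batch: 'a' } } },
  ]);

  return { eager: await ref.result(), one, many };
}
```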
package/step.js CHANGED
@@ -50,7 +50,6 @@ exports.mapRateLimit = mapRateLimit;
 /* eslint-disable max-classes-per-file */
 const hatchet_error_1 = __importDefault(require("./util/errors/hatchet-error"));
 const z = __importStar(require("zod"));
-const workflow_1 = require("./workflow");
 const parse_1 = require("./util/parse");
 const workflows_1 = require("./protoc/workflows");
 exports.CreateRateLimitSchema = z.object({
@@ -79,7 +78,7 @@ exports.DesiredWorkerLabelSchema = z
 exports.CreateStepSchema = z.object({
     name: z.string(),
     parents: z.array(z.string()).optional(),
-    timeout:
+    timeout: z.string().optional(),
     retries: z.number().optional(),
     rate_limits: z.array(exports.CreateRateLimitSchema).optional(),
     worker_labels: z.record(z.lazy(() => exports.DesiredWorkerLabelSchema)).optional(),
@@ -133,6 +132,16 @@ class Context {
             throw new hatchet_error_1.default(`Could not parse payload: ${e.message}`);
         }
     }
+    // NOTE: parentData is async since we plan on potentially making this a cacheable server call
+    parentData(task) {
+        return __awaiter(this, void 0, void 0, function* () {
+            if (typeof task === 'string') {
+                return this.stepOutput(task);
+            }
+            return this.stepOutput(task.name);
+        });
+    }
+    // TODO deprecated
     stepOutput(step) {
         if (!this.data.parents) {
             throw new hatchet_error_1.default('Step output not found');
@@ -227,11 +236,32 @@ class Context {
             yield this.client.event.putStream(stepRunId, data);
         });
     }
+    /**
+     * Enqueues multiple children workflows in parallel.
+     * @param children an array of objects containing the workflow name, input data, and options for each workflow
+     * @returns a list of workflow run references to the enqueued runs
+     */
+    bulkEnqueueChildren(children) {
+        return this.spawnWorkflows(children);
+    }
+    /**
+     * Runs multiple children workflows in parallel.
+     * @param children an array of objects containing the workflow name, input data, and options for each workflow
+     * @returns a list of results from the children workflows
+     */
+    bulkRunChildren(children) {
+        return __awaiter(this, void 0, void 0, function* () {
+            const runs = yield this.bulkEnqueueChildren(children);
+            const res = runs.map((run) => run.result());
+            return Promise.all(res);
+        });
+    }
     /**
      * Spawns multiple workflows.
      *
      * @param workflows an array of objects containing the workflow name, input data, and options for each workflow
      * @returns a list of references to the spawned workflow runs
+     * @deprecated use bulkEnqueueChildren or bulkRunChildren instead
      */
     spawnWorkflows(workflows) {
         const { workflowRunId, stepRunId } = this.action;
@@ -278,6 +308,38 @@ class Context {
             throw new hatchet_error_1.default(e.message);
         }
     }
+    /**
+     * Runs a new workflow.
+     *
+     * @param workflow the workflow to run
+     * @param input the input data for the workflow
+     * @param options additional options for spawning the workflow. If a string is provided, it is used as the key.
+     * @param <Q> the type of the input data
+     * @param <P> the type of the output data
+     * @return the result of the workflow
+     */
+    runChild(workflow, input, options) {
+        return __awaiter(this, void 0, void 0, function* () {
+            const run = yield this.spawnWorkflow(workflow, input, options);
+            return run.result();
+        });
+    }
+    /**
+     * Enqueues a new workflow.
+     *
+     * @param workflowName the name of the workflow to spawn
+     * @param input the input data for the workflow
+     * @param options additional options for spawning the workflow. If a string is provided, it is used as the key.
+     * If an object is provided, it can include:
+     * - key: a unique identifier for the workflow (deprecated, use options.key instead)
+     * - sticky: a boolean indicating whether to use sticky execution
+     * @param <Q> the type of the input data
+     * @param <P> the type of the output data
+     * @return a reference to the spawned workflow run
+     */
+    enqueueChild(workflow, input, options) {
+        return this.spawnWorkflow(workflow, input, options);
+    }
     /**
      * Spawns a new workflow.
      *
@@ -290,6 +352,7 @@ class Context {
      * @param <Q> the type of the input data
      * @param <P> the type of the output data
      * @return a reference to the spawned workflow run
+     * @deprecated use runChild or enqueueChild instead
      */
     spawnWorkflow(workflow, input, options) {
         const { workflowRunId, stepRunId } = this.action;