@hatchet-dev/typescript-sdk 1.4.0-alpha.2 → 1.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/clients/dispatcher/action-listener.d.ts +2 -0
- package/clients/dispatcher/action-listener.js +16 -0
- package/clients/hatchet-client/features/cron-client.d.ts +1 -1
- package/clients/hatchet-client/features/cron-client.js +1 -5
- package/clients/worker/worker.d.ts +3 -3
- package/clients/worker/worker.js +12 -11
- package/package.json +6 -4
- package/scripts/version-check.js +65 -0
- package/util/batch.d.ts +7 -0
- package/util/batch.js +37 -0
- package/v1/client/admin.d.ts +0 -1
- package/v1/client/admin.js +9 -48
- package/v1/client/worker/worker.d.ts +2 -2
- package/v1/client/worker/worker.js +7 -6
- package/v1/examples/migration-guides/hatchet-client.d.ts +2 -0
- package/v1/examples/migration-guides/hatchet-client.js +8 -0
- package/v1/examples/migration-guides/mergent.d.ts +23 -0
- package/v1/examples/migration-guides/mergent.js +109 -0
- package/v1/index.d.ts +1 -0
- package/v1/index.js +1 -0
- package/version.d.ts +1 -1
- package/version.js +1 -1
package/clients/dispatcher/action-listener.d.ts CHANGED
@@ -9,6 +9,8 @@ declare enum ListenStrategy {
 }
 export interface Action extends AssignedAction {
 }
+export type ActionKey = string;
+export declare function createActionKey(action: Action): ActionKey;
 export declare class ActionListener {
     config: ClientConfig;
     client: PbDispatcherClient;
package/clients/dispatcher/action-listener.js CHANGED
@@ -33,6 +33,8 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.ActionListener = void 0;
+exports.createActionKey = createActionKey;
+const dispatcher_1 = require("../../protoc/dispatcher");
 const nice_grpc_1 = require("nice-grpc");
 const sleep_1 = __importDefault(require("../../util/sleep"));
 const hatchet_error_1 = __importDefault(require("../../util/errors/hatchet-error"));
@@ -45,6 +47,20 @@ var ListenStrategy;
     ListenStrategy[ListenStrategy["LISTEN_STRATEGY_V1"] = 1] = "LISTEN_STRATEGY_V1";
     ListenStrategy[ListenStrategy["LISTEN_STRATEGY_V2"] = 2] = "LISTEN_STRATEGY_V2";
 })(ListenStrategy || (ListenStrategy = {}));
+function createActionKey(action) {
+    switch (action.actionType) {
+        case dispatcher_1.ActionType.START_GET_GROUP_KEY:
+            return `${action.getGroupKeyRunId}/${action.retryCount}`;
+        case dispatcher_1.ActionType.CANCEL_STEP_RUN:
+        case dispatcher_1.ActionType.START_STEP_RUN:
+        case dispatcher_1.ActionType.UNRECOGNIZED:
+            return `${action.stepRunId}/${action.retryCount}`;
+        default:
+            // eslint-disable-next-line no-case-declarations
+            const exhaustivenessCheck = action.actionType;
+            throw new Error(`Unhandled action type: ${exhaustivenessCheck}`);
+    }
+}
 class ActionListener {
     constructor(client, workerId, retryInterval = DEFAULT_ACTION_LISTENER_RETRY_INTERVAL, retryCount = DEFAULT_ACTION_LISTENER_RETRY_COUNT) {
         this.lastConnectionAttempt = 0;
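The upshot of the new helper: action keys now embed the retry count, so a retry of the same step run no longer shares a map slot with an earlier attempt. A minimal sketch of the behavior, assuming the TypeScript source mirrors the compiled output above and that the deep-import paths below resolve in the published dist layout (the `Action` literal is abbreviated to the fields the helper reads, hence the cast):

import { ActionType } from '@hatchet-dev/typescript-sdk/protoc/dispatcher'; // path assumed
import { createActionKey } from '@hatchet-dev/typescript-sdk/clients/dispatcher/action-listener'; // path assumed

const base = {
  actionType: ActionType.START_STEP_RUN,
  stepRunId: 'step-run-1',
  getGroupKeyRunId: '',
  retryCount: 0,
};

// Distinct keys per attempt: "step-run-1/0" vs "step-run-1/1"
createActionKey(base as any);
createActionKey({ ...base, retryCount: 1 } as any);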
package/clients/hatchet-client/features/cron-client.d.ts CHANGED
@@ -9,7 +9,7 @@ import { ClientConfig } from '../client-config';
  */
 export declare const CreateCronTriggerSchema: z.ZodObject<{
     name: z.ZodString;
-    expression: z.
+    expression: z.ZodString;
     input: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodAny>>;
     additionalMetadata: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodString>>;
     priority: z.ZodOptional<z.ZodNumber>;
package/clients/hatchet-client/features/cron-client.js CHANGED
@@ -17,11 +17,7 @@ const axios_1 = require("axios");
  */
 exports.CreateCronTriggerSchema = zod_1.z.object({
     name: zod_1.z.string(),
-    expression: zod_1.z.string()
-        // Basic cron validation regex
-        const cronRegex = /^(\*|([0-9]|1[0-9]|2[0-9]|3[0-9]|4[0-9]|5[0-9])|\*\/([0-9]|1[0-9]|2[0-3])) (\*|([0-9]|1[0-9]|2[0-3])|\*\/([0-9]|1[0-9]|2[0-3])) (\*|([1-9]|1[0-9]|2[0-9]|3[0-1])|\*\/([1-9]|1[0-9]|2[0-9]|3[0-1])) (\*|([1-9]|1[0-2])|\*\/([1-9]|1[0-2])) (\*|([0-6])|\*\/([0-6]))$/;
-        return cronRegex.test(val);
-    }, 'Invalid cron expression'),
+    expression: zod_1.z.string(),
     input: zod_1.z.record(zod_1.z.any()).optional(),
     additionalMetadata: zod_1.z.record(zod_1.z.string()).optional(),
     priority: zod_1.z.number().optional(),
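Note the client-side cron regex was dropped: `expression` is now validated only as a plain string, so malformed expressions surface from the API rather than at parse time. A small sketch of the resulting behavior, assuming zod 3.x as pinned in package.json below (schema abbreviated to the fields that changed):

import { z } from 'zod';

// Mirrors the shape of CreateCronTriggerSchema above, trimmed for illustration.
const schema = z.object({ name: z.string(), expression: z.string() });

schema.parse({ name: 'nightly', expression: '0 0 * * *' }); // passes
schema.parse({ name: 'oops', expression: 'not-a-cron' });   // also passes now (no client-side regex)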
package/clients/worker/worker.d.ts CHANGED
@@ -1,5 +1,5 @@
 import { LegacyHatchetClient } from '../hatchet-client';
-import { Action, ActionListener } from '../dispatcher/action-listener';
+import { Action, ActionKey, ActionListener } from '../dispatcher/action-listener';
 import { StepActionEvent, StepActionEventType, GroupKeyActionEvent, GroupKeyActionEventType } from '../../protoc/dispatcher';
 import HatchetPromise from '../../util/hatchet-promise/hatchet-promise';
 import { Workflow } from '../../workflow';
@@ -25,8 +25,8 @@ export declare class V0Worker {
     action_registry: ActionRegistry;
     workflow_registry: Array<WorkflowDefinition | Workflow>;
     listener: ActionListener | undefined;
-    futures: Record<
-    contexts: Record<
+    futures: Record<ActionKey, HatchetPromise<any>>;
+    contexts: Record<ActionKey, V0Context<any, any>>;
     maxRuns?: number;
     logger: Logger;
     registeredWorkflowPromises: Array<Promise<any>>;
package/clients/worker/worker.js CHANGED
@@ -21,6 +21,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.V0Worker = void 0;
 const hatchet_error_1 = __importDefault(require("../../util/errors/hatchet-error"));
+const action_listener_1 = require("../dispatcher/action-listener");
 const dispatcher_1 = require("../../protoc/dispatcher");
 const hatchet_promise_1 = __importDefault(require("../../util/hatchet-promise/hatchet-promise"));
 const workflows_1 = require("../../protoc/workflows");
@@ -174,7 +175,7 @@ class V0Worker {
         try {
             // Note: we always use a DurableContext since its a superset of the Context class
             const context = new step_1.V0DurableContext(action, this.client, this);
-            this.contexts[action
+            this.contexts[(0, action_listener_1.createActionKey)(action)] = context;
             const step = this.action_registry[actionId];
             if (!step) {
                 this.logger.error(`Registered actions: '${Object.keys(this.action_registry).join(', ')}'`);
@@ -205,8 +206,8 @@ class V0Worker {
             }
             finally {
                 // delete the run from the futures
-                delete this.futures[action
-                delete this.contexts[action
+                delete this.futures[(0, action_listener_1.createActionKey)(action)];
+                delete this.contexts[(0, action_listener_1.createActionKey)(action)];
             }
         });
         const failure = (error) => __awaiter(this, void 0, void 0, function* () {
@@ -228,8 +229,8 @@ class V0Worker {
             }
             finally {
                 // delete the run from the futures
-                delete this.futures[action
-                delete this.contexts[action
+                delete this.futures[(0, action_listener_1.createActionKey)(action)];
+                delete this.contexts[(0, action_listener_1.createActionKey)(action)];
             }
         });
         const future = new hatchet_promise_1.default((() => __awaiter(this, void 0, void 0, function* () {
@@ -243,7 +244,7 @@ class V0Worker {
             }
             yield success(result);
         }))());
-        this.futures[action
+        this.futures[(0, action_listener_1.createActionKey)(action)] = future;
         // Send the action event to the dispatcher
         const event = this.getStepActionEvent(action, dispatcher_1.StepActionEventType.STEP_EVENT_TYPE_STARTED, false, undefined, action.retryCount);
         this.client.dispatcher.sendStepActionEvent(event).catch((e) => {
@@ -266,7 +267,7 @@ class V0Worker {
         const { actionId } = action;
         try {
             const context = new step_1.V0Context(action, this.client, this);
-            const key = action
+            const key = (0, action_listener_1.createActionKey)(action);
             if (!key) {
                 this.logger.error(`No group key run id provided for action ${actionId}`);
                 return;
@@ -365,8 +366,8 @@ class V0Worker {
         const { stepRunId } = action;
         try {
             this.logger.info(`Cancelling step run ${action.stepRunId}`);
-            const future = this.futures[
-            const context = this.contexts[
+            const future = this.futures[(0, action_listener_1.createActionKey)(action)];
+            const context = this.contexts[(0, action_listener_1.createActionKey)(action)];
             if (context && context.controller) {
                 context.controller.abort('Cancelled by worker');
             }
@@ -382,8 +383,8 @@ class V0Worker {
             this.logger.error('Could not cancel step run: ', e);
         }
         finally {
-            delete this.futures[
-            delete this.contexts[
+            delete this.futures[(0, action_listener_1.createActionKey)(action)];
+            delete this.contexts[(0, action_listener_1.createActionKey)(action)];
         }
     });
 }
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@hatchet-dev/typescript-sdk",
-  "version": "1.4.0-alpha.2",
+  "version": "1.4.1",
   "description": "Background task orchestration & visibility for developers",
   "types": "dist/index.d.ts",
   "files": [
@@ -18,6 +18,7 @@
   "scripts": {
     "build": "echo 'build hatchet sdk with `npm run tsc:build` to ensure it is not build during the publish step' && exit 0",
     "prepare": "npm run build",
+    "postinstall": "node scripts/version-check.js",
     "dump-version": "node -e \"console.log('export const HATCHET_VERSION = \\'' + require('./package.json').version + '\\';');\" > src/version.ts",
     "tsc:build": "npm run dump-version && tsc && resolve-tspaths",
     "test:unit": "jest --testMatch='**/*.test.ts'",
@@ -32,7 +33,7 @@
     "eslint:fix": "eslint \"{src,tests}/**/*.{ts,tsx,js}\" --fix",
     "prettier:check": "prettier \"src/**/*.{ts,tsx}\" --list-different",
     "prettier:fix": "prettier \"src/**/*.{ts,tsx}\" --write",
-    "prepublish": "cp package.json dist/package.json; cp README.md dist/",
+    "prepublish": "cp package.json dist/package.json; cp README.md dist/; cp -r scripts dist/",
     "publish:ci": "rm -rf ./dist && npm run dump-version && npm run tsc:build && npm run prepublish && cd dist && npm publish --access public --no-git-checks",
     "publish:ci:alpha": "rm -rf ./dist && npm run dump-version && npm run tsc:build && npm run prepublish && cd dist && npm publish --access public --no-git-checks --tag alpha",
     "generate-docs": "typedoc",
@@ -43,10 +44,10 @@
   "license": "MIT",
   "devDependencies": {
     "@tsd/typescript": "^5.8.2",
-    "@typescript-eslint/eslint-plugin": "^6.21.0",
-    "@typescript-eslint/parser": "^6.21.0",
     "@types/jest": "^29.5.14",
     "@types/node": "^22.13.14",
+    "@typescript-eslint/eslint-plugin": "^6.21.0",
+    "@typescript-eslint/parser": "^6.21.0",
     "autoprefixer": "^10.4.21",
     "dotenv-cli": "^7.4.4",
     "eslint": "^8.56.0",
@@ -86,6 +87,7 @@
     "nice-grpc-common": "^2.0.2",
     "protobufjs": "^7.4.0",
     "qs": "^6.14.0",
+    "semver": "^7.7.1",
     "yaml": "^2.7.1",
     "zod": "^3.24.2"
   },
package/scripts/version-check.js ADDED
@@ -0,0 +1,65 @@
+/* eslint-disable no-console */
+const fs = require('fs');
+const path = require('path');
+const semver = require('semver');
+
+const WARNINGS = {
+  '1.4.0':
+    'Breaking Changes in v1.4.0: This release fixes a critical bug which makes the runNoWait methods async. You will need to await this method to access the runRef.',
+};
+
+try {
+  // Get the current package version
+  // eslint-disable-next-line global-require
+  const currentVersion = require('../package.json').version;
+
+  // Look for the package.json in various possible locations
+  const possiblePaths = [
+    // npm
+    path.join(process.cwd(), 'package.json'),
+    // pnpm
+    path.join(process.cwd(), '..', 'package.json'),
+    // yarn
+    path.join(process.cwd(), '..', '..', 'package.json'),
+    // monorepo setup
+    path.join(process.cwd(), '..', '..', '..', 'package.json'),
+  ];
+
+  let parentPackagePath = null;
+  for (const possiblePath of possiblePaths) {
+    if (fs.existsSync(possiblePath)) {
+      parentPackagePath = possiblePath;
+      break;
+    }
+  }
+
+  if (parentPackagePath) {
+    const parentPackage = JSON.parse(fs.readFileSync(parentPackagePath, 'utf8'));
+    const dependencies = {
+      ...parentPackage.dependencies,
+      ...parentPackage.devDependencies,
+    };
+
+    const installedVersion = dependencies['@hatchet-dev/typescript-sdk'];
+
+    // If there's no installed version, this is a first-time install
+    if (!installedVersion) {
+      // Show all warnings for the current version
+      for (const [version, warning] of Object.entries(WARNINGS)) {
+        if (semver.gte(currentVersion, version)) {
+          console.warn('\x1b[33m%s\x1b[0m', warning);
+        }
+      }
+    } else {
+      // Check for specific version warnings
+      for (const [version, warning] of Object.entries(WARNINGS)) {
+        if (semver.gte(currentVersion, version) && semver.lt(installedVersion, version)) {
+          console.warn('\x1b[33m%s\x1b[0m', warning);
+        }
+      }
+    }
+  }
+} catch (error) {
+  // Silently fail - this is just a warning system
+  // console.error(error);
+}
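The gating rule in this script: a warning registered for version V prints only when the newly installed version is at or above V while the previously declared one was below it. A quick illustration with the same semver calls (plain versions assumed for clarity; the script actually compares whatever string is declared in the parent package.json):

import semver from 'semver';

// Upgrading 1.3.5 -> 1.4.1 crosses the 1.4.0 boundary, so the warning prints:
semver.gte('1.4.1', '1.4.0') && semver.lt('1.3.5', '1.4.0'); // true
// Re-installing 1.4.1 over 1.4.0 does not:
semver.gte('1.4.1', '1.4.0') && semver.lt('1.4.0', '1.4.0'); // false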
package/util/batch.d.ts ADDED
package/util/batch.js ADDED
@@ -0,0 +1,37 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.batch = batch;
+function batch(payloads, numElm, maxBytes) {
+    const batches = [];
+    let currentBatchPayloads = [];
+    let currentBatchIndices = [];
+    let currentBatchSize = 0;
+    for (let i = 0; i < payloads.length; i += 1) {
+        const request = payloads[i];
+        const requestSize = Buffer.byteLength(JSON.stringify(request), 'utf8');
+        // Check if adding this request would exceed either the payload limit or batch size
+        if (currentBatchPayloads.length > 0 &&
+            (currentBatchSize + requestSize > maxBytes || currentBatchPayloads.length >= numElm)) {
+            batches.push({
+                batchIndex: batches.length,
+                payloads: currentBatchPayloads,
+                originalIndices: currentBatchIndices,
+            });
+            currentBatchPayloads = [];
+            currentBatchIndices = [];
+            currentBatchSize = 0;
+        }
+        // Add the request to the current batch
+        currentBatchPayloads.push(request);
+        currentBatchIndices.push(i);
+        currentBatchSize += requestSize;
+    }
+    if (currentBatchPayloads.length > 0) {
+        batches.push({
+            batchIndex: batches.length,
+            payloads: currentBatchPayloads,
+            originalIndices: currentBatchIndices,
+        });
+    }
+    return batches;
+}
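`batch` caps each chunk by element count and by serialized byte size, whichever limit trips first, and records each payload's original index so results can be mapped back to input order. A usage sketch (the deep-import path is assumed from the file layout above):

import { batch } from '@hatchet-dev/typescript-sdk/util/batch'; // path assumed

const payloads = [{ name: 'a' }, { name: 'b' }, { name: 'c' }];
const batches = batch(payloads, 2, 4 * 1024 * 1024); // max 2 elements or 4 MiB per batch
// -> [ { batchIndex: 0, payloads: [a, b], originalIndices: [0, 1] },
//      { batchIndex: 1, payloads: [c],    originalIndices: [2]    } ]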
package/v1/client/admin.d.ts CHANGED
package/v1/client/admin.js CHANGED
@@ -18,6 +18,7 @@ const workflow_run_ref_1 = __importDefault(require("../../util/workflow-run-ref"
 const grpc_helpers_1 = require("../../util/grpc-helpers");
 const child_listener_client_1 = require("../../clients/listeners/run-listener/child-listener-client");
 const workflows_1 = require("../../protoc/workflows");
+const batch_1 = require("../../util/batch");
 class AdminClient {
     constructor(config, api, runs) {
         this.config = config;
@@ -75,17 +76,16 @@ class AdminClient {
                 ? JSON.stringify(options.additionalMetadata)
                 : undefined });
         });
-        const
+        const limit = 4 * 1024 * 1024; // FIXME configurable GRPC limit
+        const batches = (0, batch_1.batch)(workflowRequests, batchSize, limit);
         this.logger.debug(`batching ${batches.length} batches`);
         try {
-            const
-            //
-            for (
-                const batch = batches[batchIndex];
-                const { requests, originalIndices } = batch;
+            const results = [];
+            // for loop to ensure serial execution of batches
+            for (const { payloads, originalIndices, batchIndex } of batches) {
                 // Call the bulk trigger workflow method for this batch
                 const bulkTriggerWorkflowResponse = yield this.grpc.bulkTriggerWorkflow(workflows_1.BulkTriggerWorkflowRequest.create({
-                    workflows:
+                    workflows: payloads,
                 }));
                 this.logger.debug(`batch ${batchIndex + 1} of ${batches.length}`);
                 // Map the results back to their original indices
@@ -94,54 +94,15 @@ class AdminClient {
                     const { options } = workflowRuns[originalIndex];
                     return new workflow_run_ref_1.default(resp, this.listenerClient, this.runs, options === null || options === void 0 ? void 0 : options.parentId);
                 });
-
+                results.push(...batchResults);
             }
-            return
+            return results;
         }
         catch (e) {
             throw new hatchet_error_1.default(e.message);
         }
         });
     }
-    batchWorkflows(workflowRequests, batchSize) {
-        const payloadLimit = 4 * 1024 * 1024; // 4MB limit
-        const batches = [];
-        let currentBatch = [];
-        let currentBatchIndices = [];
-        let currentBatchSize = 0;
-        for (let i = 0; i < workflowRequests.length; i += 1) {
-            const request = workflowRequests[i];
-            const requestSize = Buffer.byteLength(JSON.stringify(request), 'utf8');
-            // Check if adding this request would exceed either the payload limit or batch size
-            if (currentBatchSize + requestSize > payloadLimit || currentBatch.length >= batchSize) {
-                // If we have a batch, add it to batches
-                if (currentBatch.length > 0) {
-                    batches.push({
-                        requests: currentBatch,
-                        originalIndices: currentBatchIndices,
-                    });
-                }
-                // Start a new batch
-                currentBatch = [request];
-                currentBatchIndices = [i];
-                currentBatchSize = requestSize;
-            }
-            else {
-                // Add to current batch
-                currentBatch.push(request);
-                currentBatchIndices.push(i);
-                currentBatchSize += requestSize;
-            }
-        }
-        // Add the last batch if it exists
-        if (currentBatch.length > 0) {
-            batches.push({
-                requests: currentBatch,
-                originalIndices: currentBatchIndices,
-            });
-        }
-        return batches;
-    }
     putRateLimit(key, limit, duration) {
         return __awaiter(this, void 0, void 0, function* () {
             yield this.grpc.putRateLimit({
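In TypeScript source terms, the rewritten trigger loop is the classic serial-batches shape: `await` inside a `for...of` so one gRPC call completes before the next starts, with results accumulated in input order. A generic reconstruction under that assumption (the function and parameter names here are mine, not the SDK's):

async function runBatches<T, R>(
  batches: Array<{ batchIndex: number; payloads: T[]; originalIndices: number[] }>,
  send: (payloads: T[]) => Promise<R[]>
): Promise<R[]> {
  const results: R[] = [];
  for (const { payloads } of batches) {
    // awaiting inside the loop keeps batch execution serial
    results.push(...(await send(payloads)));
  }
  return results;
}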
package/v1/client/worker/worker.d.ts CHANGED
@@ -51,14 +51,14 @@ export declare class Worker {
      * @param workflows - Array of workflows to register
      * @returns Array of registered workflow promises
      */
-    registerWorkflows(workflows?: Array<BaseWorkflowDeclaration<any, any> | V0Workflow>): Promise<void
+    registerWorkflows(workflows?: Array<BaseWorkflowDeclaration<any, any> | V0Workflow>): Promise<void>;
     /**
      * Registers a single workflow with the worker
      * @param workflow - The workflow to register
      * @returns A promise that resolves when the workflow is registered
      * @deprecated use registerWorkflows instead
      */
-    registerWorkflow(workflow: BaseWorkflowDeclaration<any, any> | V0Workflow): Promise<void
+    registerWorkflow(workflow: BaseWorkflowDeclaration<any, any> | V0Workflow): Promise<void>;
     /**
      * Starts the worker
      * @returns Promise that resolves when the worker is stopped or killed
package/v1/client/worker/worker.js CHANGED
@@ -50,10 +50,10 @@ class Worker {
      */
     registerWorkflows(workflows) {
         return __awaiter(this, void 0, void 0, function* () {
-
+            for (const wf of workflows || []) {
                 if (wf instanceof declaration_1.BaseWorkflowDeclaration) {
                     // TODO check if tenant is V1
-
+                    yield this.nonDurable.registerWorkflowV1(wf);
                     if (wf.definition._durableTasks.length > 0) {
                         if (!this.durable) {
                             const opts = Object.assign(Object.assign({ name: `${this.name}-durable` }, this.config), { maxRuns: this.config.durableSlots || DEFAULT_DURABLE_SLOTS });
@@ -62,11 +62,12 @@ class Worker {
                         }
                         this.durable.registerDurableActionsV1(wf.definition);
                     }
-                    return register;
                 }
-
-
-
+                else {
+                    // fallback to v0 client for backwards compatibility
+                    yield this.nonDurable.registerWorkflow(wf);
+                }
+            }
         });
     }
     /**
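The rewritten loop means a single `registerWorkflows` call can mix v1 declarations and legacy v0 workflows: v1 instances go through `registerWorkflowV1` (spinning up a durable worker when durable tasks are present) and anything else falls back to the v0 registration path. A usage sketch under that reading (the worker name and workflow variables are placeholders):

import { hatchet } from './hatchet-client'; // client instance as in the migration-guide example below

async function main(v1Task: any, legacyV0Workflow: any) {
  const worker = await hatchet.worker('mixed-worker'); // placeholder name
  // v1 declarations and legacy v0 workflows can now be registered together
  await worker.registerWorkflows([v1Task, legacyV0Workflow]);
  await worker.start();
}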
package/v1/examples/migration-guides/hatchet-client.d.ts ADDED
package/v1/examples/migration-guides/hatchet-client.js ADDED
@@ -0,0 +1,8 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.hatchet = void 0;
+const sdk_1 = __importDefault(require("../../../sdk"));
+exports.hatchet = sdk_1.default.init();
package/v1/examples/migration-guides/mergent.d.ts ADDED
@@ -0,0 +1,23 @@
+export declare function processImageTask(req: {
+    body: {
+        imageUrl: string;
+        filters: string[];
+    };
+}): Promise<{
+    success: boolean;
+    processedUrl: string;
+}>;
+type ImageProcessInput = {
+    imageUrl: string;
+    filters: string[];
+};
+type ImageProcessOutput = {
+    processedUrl: string;
+    metadata: {
+        size: number;
+        format: string;
+        appliedFilters: string[];
+    };
+};
+export declare const imageProcessor: import("../..").TaskWorkflowDeclaration<ImageProcessInput, ImageProcessOutput>;
+export {};
package/v1/examples/migration-guides/mergent.js ADDED
@@ -0,0 +1,109 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.imageProcessor = void 0;
+exports.processImageTask = processImageTask;
+const hatchet_client_1 = require("./hatchet-client");
+function processImage(imageUrl, filters) {
+    // Do some image processing
+    return Promise.resolve({ url: imageUrl, size: 100, format: 'png' });
+}
+// ❓ Before (Mergent)
+function processImageTask(req) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const { imageUrl, filters } = req.body;
+        try {
+            const result = yield processImage(imageUrl, filters);
+            return { success: true, processedUrl: result.url };
+        }
+        catch (error) {
+            console.error('Image processing failed:', error);
+            throw error;
+        }
+    });
+}
+exports.imageProcessor = hatchet_client_1.hatchet.task({
+    name: 'image-processor',
+    retries: 3,
+    executionTimeout: '10m',
+    fn: (_a) => __awaiter(void 0, [_a], void 0, function* ({ imageUrl, filters }) {
+        // Do some image processing
+        const result = yield processImage(imageUrl, filters);
+        if (!result.url)
+            throw new Error('Processing failed to generate URL');
+        return {
+            processedUrl: result.url,
+            metadata: {
+                size: result.size,
+                format: result.format,
+                appliedFilters: filters,
+            },
+        };
+    }),
+});
+// !!
+function run() {
+    return __awaiter(this, void 0, void 0, function* () {
+        // ❓ Running a task (Mergent)
+        const options = {
+            method: 'POST',
+            headers: { Authorization: 'Bearer <token>', 'Content-Type': 'application/json' },
+            body: JSON.stringify({
+                name: '4cf95241-fa19-47ef-8a67-71e483747649',
+                queue: 'default',
+                request: {
+                    url: 'https://example.com',
+                    headers: { Authorization: 'fake-secret-token', 'Content-Type': 'application/json' },
+                    body: 'Hello, world!',
+                },
+            }),
+        };
+        fetch('https://api.mergent.co/v2/tasks', options)
+            .then((response) => response.json())
+            .then((response) => console.log(response))
+            .catch((err) => console.error(err));
+        // !!
+        // ❓ Running a task (Hatchet)
+        const result = yield exports.imageProcessor.run({
+            imageUrl: 'https://example.com/image.png',
+            filters: ['blur'],
+        });
+        // you can await fully typed results
+        console.log(result);
+        // !!
+    });
+}
+function schedule() {
+    return __awaiter(this, void 0, void 0, function* () {
+        // ❓ Scheduling tasks (Mergent)
+        const options = {
+            // same options as before
+            body: JSON.stringify({
+                // same body as before
+                delay: '5m',
+            }),
+        };
+        // !!
+        // ❓ Scheduling tasks (Hatchet)
+        // Schedule the task to run at a specific time
+        const runAt = new Date(Date.now() + 1000 * 60 * 60 * 24);
+        exports.imageProcessor.schedule(runAt, {
+            imageUrl: 'https://example.com/image.png',
+            filters: ['blur'],
+        });
+        // Schedule the task to run every hour
+        exports.imageProcessor.cron('run-hourly', '0 * * * *', {
+            imageUrl: 'https://example.com/image.png',
+            filters: ['blur'],
+        });
+        // !!
+    });
+}
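For reference, the TypeScript source behind the compiled `imageProcessor` above reads roughly as follows, reconstructed from the compiled output and mergent.d.ts (the `processImage` stub is kept as in the example; treat this as a sketch, not the repository's exact source):

import { hatchet } from './hatchet-client';

async function processImage(imageUrl: string, filters: string[]) {
  // Do some image processing
  return { url: imageUrl, size: 100, format: 'png' };
}

export const imageProcessor = hatchet.task({
  name: 'image-processor',
  retries: 3,
  executionTimeout: '10m',
  fn: async ({ imageUrl, filters }: { imageUrl: string; filters: string[] }) => {
    const result = await processImage(imageUrl, filters);
    if (!result.url) throw new Error('Processing failed to generate URL');
    return {
      processedUrl: result.url,
      metadata: { size: result.size, format: result.format, appliedFilters: filters },
    };
  },
});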
package/v1/index.d.ts CHANGED
package/v1/index.js CHANGED
package/version.d.ts CHANGED
@@ -1 +1 @@
-export declare const HATCHET_VERSION = "1.4.0-alpha.2";
+export declare const HATCHET_VERSION = "1.4.1";
package/version.js CHANGED