@mastra/client-js 0.1.14-alpha.1 → 0.1.14-alpha.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +6 -6
- package/CHANGELOG.md +10 -0
- package/dist/index.cjs +20 -28
- package/dist/index.js +20 -28
- package/package.json +3 -3
- package/src/resources/workflow.ts +41 -43
package/.turbo/turbo-build.log
CHANGED

@@ -1,5 +1,5 @@
 
-> @mastra/client-js@0.1.14-alpha.1 build /home/runner/work/mastra/mastra/client-sdks/client-js
+> @mastra/client-js@0.1.14-alpha.2 build /home/runner/work/mastra/mastra/client-sdks/client-js
 > tsup src/index.ts --format esm,cjs --dts --clean --treeshake=smallest --splitting
 
 CLI Building entry: src/index.ts
@@ -9,11 +9,11 @@
 CLI Cleaning output folder
 ESM Build start
 CJS Build start
-ESM dist/index.js 21.
-ESM ⚡️ Build success in
-CJS dist/index.cjs 21.
-CJS ⚡️ Build success in
+ESM dist/index.js 21.12 KB
+ESM ⚡️ Build success in 893ms
+CJS dist/index.cjs 21.30 KB
+CJS ⚡️ Build success in 900ms
 DTS Build start
-DTS ⚡️ Build success in
+DTS ⚡️ Build success in 12468ms
 DTS dist/index.d.ts 18.89 KB
 DTS dist/index.d.cts 18.89 KB
package/CHANGELOG.md
CHANGED

@@ -1,5 +1,15 @@
 # @mastra/client-js
 
+## 0.1.14-alpha.2
+
+### Patch Changes
+
+- 84fe241: Improve streaming of workflows
+- Updated dependencies [56c31b7]
+- Updated dependencies [dbbbf80]
+- Updated dependencies [99d43b9]
+  - @mastra/core@0.8.0-alpha.2
+
 ## 0.1.14-alpha.1
 
 ### Patch Changes
package/dist/index.cjs
CHANGED

@@ -359,6 +359,7 @@ var Vector = class extends BaseResource {
 };
 
 // src/resources/workflow.ts
+var RECORD_SEPARATOR = "\x1E";
 var Workflow = class extends BaseResource {
   constructor(options, workflowId) {
     super(options);
@@ -460,43 +461,34 @@ var Workflow = class extends BaseResource {
    */
   async *streamProcessor(stream) {
     const reader = stream.getReader();
+    let doneReading = false;
     let buffer = "";
     try {
-      while (
+      while (!doneReading) {
         const { done, value } = await reader.read();
-
-
-
-
-
-
-
+        doneReading = done;
+        if (done && !value) continue;
+        try {
+          const decoded = value ? new TextDecoder().decode(value) : "";
+          const chunks = (buffer + decoded).split(RECORD_SEPARATOR);
+          buffer = chunks.pop() || "";
+          for (const chunk of chunks) {
+            if (chunk) {
+              yield JSON.parse(chunk);
             }
           }
-
+        } catch (error) {
         }
-
-
-
-
-
-        try {
-          const parsedRecord = JSON.parse(record);
-          const isWorkflowCompleted = Object.values(parsedRecord?.activePaths || {}).every(
-            (path) => path.status === "completed" || path.status === "suspended" || path.status === "failed" || path.status === "skipped"
-          );
-          if (isWorkflowCompleted) {
-            reader.cancel();
-          }
-          yield parsedRecord;
-        } catch (e) {
-          throw new Error(`Could not parse record: ${record}, ${e}`);
-        }
-      }
+      }
+      if (buffer) {
+        try {
+          yield JSON.parse(buffer);
+        } catch {
         }
       }
     } finally {
-      reader.cancel()
+      reader.cancel().catch(() => {
+      });
     }
   }
   /**
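
Note on the change above: the new RECORD_SEPARATOR constant is the ASCII Record Separator character (0x1E). Each workflow record is serialized as JSON and terminated with this character, so the client can split the byte stream on it and treat any trailing fragment as a partial record. Below is a minimal TypeScript sketch of the producer side of that framing; the encodeRecords and toStream helpers and the chunkSize parameter are illustrative assumptions, not part of this package. Only the separator value comes from the diff.

// Hypothetical producer-side sketch of the 0x1E framing that the new
// streamProcessor consumes. Helper names are illustrative, not from @mastra.
const RECORD_SEPARATOR = '\x1E';

function encodeRecords(records: unknown[]): Uint8Array {
  // Serialize each record as JSON and terminate it with the separator:
  // <json>\x1E<json>\x1E...
  const framed = records.map(r => JSON.stringify(r) + RECORD_SEPARATOR).join('');
  return new TextEncoder().encode(framed);
}

function toStream(bytes: Uint8Array, chunkSize = 7): ReadableStream<Uint8Array> {
  // Emit the framed bytes in arbitrary small chunks, so records can be split
  // across reads, which is exactly the case the consumer's buffer handles.
  return new ReadableStream({
    start(controller) {
      for (let i = 0; i < bytes.length; i += chunkSize) {
        controller.enqueue(bytes.slice(i, i + chunkSize));
      }
      controller.close();
    },
  });
}

Because the frames are delimited rather than length-prefixed, the consumer never needs to know record sizes in advance; the buffer-and-split loop in the compiled output above is sufficient.
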
package/dist/index.js
CHANGED

@@ -357,6 +357,7 @@ var Vector = class extends BaseResource {
 };
 
 // src/resources/workflow.ts
+var RECORD_SEPARATOR = "\x1E";
 var Workflow = class extends BaseResource {
   constructor(options, workflowId) {
     super(options);
@@ -458,43 +459,34 @@ var Workflow = class extends BaseResource {
    */
   async *streamProcessor(stream) {
     const reader = stream.getReader();
+    let doneReading = false;
     let buffer = "";
     try {
-      while (
+      while (!doneReading) {
        const { done, value } = await reader.read();
-
-
-
-
-
-
-
+        doneReading = done;
+        if (done && !value) continue;
+        try {
+          const decoded = value ? new TextDecoder().decode(value) : "";
+          const chunks = (buffer + decoded).split(RECORD_SEPARATOR);
+          buffer = chunks.pop() || "";
+          for (const chunk of chunks) {
+            if (chunk) {
+              yield JSON.parse(chunk);
             }
           }
-
+        } catch (error) {
         }
-
-
-
-
-
-        try {
-          const parsedRecord = JSON.parse(record);
-          const isWorkflowCompleted = Object.values(parsedRecord?.activePaths || {}).every(
-            (path) => path.status === "completed" || path.status === "suspended" || path.status === "failed" || path.status === "skipped"
-          );
-          if (isWorkflowCompleted) {
-            reader.cancel();
-          }
-          yield parsedRecord;
-        } catch (e) {
-          throw new Error(`Could not parse record: ${record}, ${e}`);
-        }
-      }
+      }
+      if (buffer) {
+        try {
+          yield JSON.parse(buffer);
+        } catch {
        }
      }
    } finally {
-      reader.cancel()
+      reader.cancel().catch(() => {
+      });
    }
  }
  /**
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@mastra/client-js",
-  "version": "0.1.14-alpha.1",
+  "version": "0.1.14-alpha.2",
   "description": "The official TypeScript library for the Mastra Client API",
   "author": "",
   "type": "module",
@@ -26,11 +26,11 @@
     "json-schema": "^0.4.0",
     "zod": "^3.24.2",
     "zod-to-json-schema": "^3.24.3",
-    "@mastra/core": "^0.8.0-alpha.1"
+    "@mastra/core": "^0.8.0-alpha.2"
   },
   "devDependencies": {
     "@babel/preset-env": "^7.26.9",
-    "@babel/preset-typescript": "^7.
+    "@babel/preset-typescript": "^7.27.0",
     "@tsconfig/recommended": "^1.0.8",
     "@types/json-schema": "^7.0.15",
     "@types/node": "^20.17.27",
package/src/resources/workflow.ts
CHANGED

@@ -2,6 +2,8 @@ import type { GetWorkflowResponse, ClientOptions, WorkflowRunResult } from '../t
 
 import { BaseResource } from './base';
 
+const RECORD_SEPARATOR = '\x1E';
+
 export class Workflow extends BaseResource {
   constructor(
     options: ClientOptions,
@@ -119,61 +121,57 @@ export class Workflow extends BaseResource {
    */
  private async *streamProcessor(stream: ReadableStream): AsyncGenerator<WorkflowRunResult, void, unknown> {
    const reader = stream.getReader();
+
+    // Track if we've finished reading from the stream
+    let doneReading = false;
+    // Buffer to accumulate partial chunks
    let buffer = '';
 
    try {
-      while (
+      while (!doneReading) {
+        // Read the next chunk from the stream
        const { done, value } = await reader.read();
+        doneReading = done;
+
+        // Skip processing if we're done and there's no value
+        if (done && !value) continue;
+
+        try {
+          // Decode binary data to text
+          const decoded = value ? new TextDecoder().decode(value) : '';
+
+          // Split the combined buffer and new data by record separator
+          const chunks = (buffer + decoded).split(RECORD_SEPARATOR);
 
-
-
-
-
-
-
-
-
+          // The last chunk might be incomplete, so save it for the next iteration
+          buffer = chunks.pop() || '';
+
+          // Process complete chunks
+          for (const chunk of chunks) {
+            if (chunk) {
+              // Only process non-empty chunks
+              yield JSON.parse(chunk);
            }
          }
-
+        } catch (error) {
+          // Silently ignore parsing errors to maintain stream processing
+          // This allows the stream to continue even if one record is malformed
        }
+      }
 
-
-
-
-
-
-
-        // Keep the last (potentially incomplete) chunk in the buffer
-        buffer = records.pop() || '';
-
-        // Process each complete record
-        for (const record of records) {
-          if (record.trim().length > 0) {
-            try {
-              // Assuming the records are JSON strings
-              const parsedRecord = JSON.parse(record);
-
-              //Check to see if all steps are completed and cancel reader
-              const isWorkflowCompleted = Object.values(parsedRecord?.activePaths || {}).every(
-                (path: any) =>
-                  path.status === 'completed' ||
-                  path.status === 'suspended' ||
-                  path.status === 'failed' ||
-                  path.status === 'skipped',
-              );
-              if (isWorkflowCompleted) {
-                reader.cancel();
-              }
-              yield parsedRecord;
-            } catch (e) {
-              throw new Error(`Could not parse record: ${record}, ${e}`);
-            }
-          }
+      // Process any remaining data in the buffer after stream is done
+      if (buffer) {
+        try {
+          yield JSON.parse(buffer);
+        } catch {
+          // Ignore parsing error for final chunk
        }
      }
    } finally {
-      reader
+      // Always ensure we clean up the reader
+      reader.cancel().catch(() => {
+        // Ignore cancel errors
+      });
    }
  }
 
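
To make the buffering behavior concrete, here is a self-contained sketch of the same pattern fed by a stream that deliberately splits one record across two reads. The readRecords generator mirrors the new streamProcessor above (with the error-swallowing catch blocks elided for brevity); the demo function and its sample data are hypothetical.

const RECORD_SEPARATOR = '\x1E';

// Standalone copy of the pattern above: split on the record separator, keep
// the trailing partial record in `buffer`, and flush it once the stream ends.
async function* readRecords(stream: ReadableStream<Uint8Array>): AsyncGenerator<unknown> {
  const reader = stream.getReader();
  let doneReading = false;
  let buffer = '';
  try {
    while (!doneReading) {
      const { done, value } = await reader.read();
      doneReading = done;
      if (done && !value) continue;
      const decoded = value ? new TextDecoder().decode(value) : '';
      const chunks = (buffer + decoded).split(RECORD_SEPARATOR);
      buffer = chunks.pop() || '';
      for (const chunk of chunks) {
        if (chunk) yield JSON.parse(chunk);
      }
    }
    if (buffer) yield JSON.parse(buffer);
  } finally {
    reader.cancel().catch(() => {});
  }
}

// Demo: a record split across two reads is reassembled and parsed exactly
// once, and the final record is recovered from the buffer even without a
// trailing separator.
async function demo() {
  const enc = new TextEncoder();
  const stream = new ReadableStream<Uint8Array>({
    start(controller) {
      controller.enqueue(enc.encode('{"step":'));                // partial record
      controller.enqueue(enc.encode('"a"}' + RECORD_SEPARATOR)); // completes it
      controller.enqueue(enc.encode('{"step":"b"}'));            // no trailing separator
      controller.close();
    },
  });
  for await (const record of readRecords(stream)) {
    console.log(record); // { step: 'a' } then { step: 'b' }
  }
}

demo().catch(console.error);

Compared with the removed code, this version no longer throws on a malformed record or cancels the reader based on activePaths statuses: termination is driven solely by the stream's done signal, and reader.cancel() in the finally block cancels the underlying stream in every exit path.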