@aigne/core 1.63.0-beta.9 → 1.63.0
This diff shows the changes between publicly released versions of this package as published to a supported public registry. It is provided for informational purposes only.
- package/CHANGELOG.md +33 -0
- package/lib/cjs/agents/agent.js +1 -1
- package/lib/cjs/agents/ai-agent.js +35 -5
- package/lib/cjs/agents/chat-model.js +1 -1
- package/lib/cjs/agents/team-agent.js +35 -2
- package/lib/cjs/utils/model-utils.js +1 -0
- package/lib/cjs/utils/queue.d.ts +57 -0
- package/lib/cjs/utils/queue.js +318 -0
- package/lib/dts/utils/queue.d.ts +57 -0
- package/lib/esm/agents/agent.js +1 -1
- package/lib/esm/agents/ai-agent.js +1 -1
- package/lib/esm/agents/chat-model.js +1 -1
- package/lib/esm/agents/team-agent.js +1 -1
- package/lib/esm/utils/model-utils.js +1 -0
- package/lib/esm/utils/queue.d.ts +57 -0
- package/lib/esm/utils/queue.js +315 -0
- package/package.json +3 -4
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,38 @@
 # Changelog
 
+## [1.63.0](https://github.com/AIGNE-io/aigne-framework/compare/core-v1.63.0-beta.12...core-v1.63.0) (2025-10-19)
+
+
+### Dependencies
+
+* The following workspace dependencies were updated
+  * dependencies
+    * @aigne/afs bumped to 1.1.0
+    * @aigne/observability-api bumped to 0.11.2
+
+## [1.63.0-beta.12](https://github.com/AIGNE-io/aigne-framework/compare/core-v1.63.0-beta.11...core-v1.63.0-beta.12) (2025-10-17)
+
+
+### Bug Fixes
+
+* custom queue instead of fastq to compatible with browser ([#640](https://github.com/AIGNE-io/aigne-framework/issues/640)) ([51e0f8f](https://github.com/AIGNE-io/aigne-framework/commit/51e0f8fdeaf26f2765f392218f04a2af4c0d2e1a))
+
+## [1.63.0-beta.11](https://github.com/AIGNE-io/aigne-framework/compare/core-v1.63.0-beta.10...core-v1.63.0-beta.11) (2025-10-17)
+
+
+### Bug Fixes
+
+* **gemini:** implement retry mechanism for empty responses with structured output fallback ([#638](https://github.com/AIGNE-io/aigne-framework/issues/638)) ([d33c8bb](https://github.com/AIGNE-io/aigne-framework/commit/d33c8bb9711aadddef9687d6cf472a179cd8ed9c))
+
+## [1.63.0-beta.10](https://github.com/AIGNE-io/aigne-framework/compare/core-v1.63.0-beta.9...core-v1.63.0-beta.10) (2025-10-16)
+
+
+### Dependencies
+
+* The following workspace dependencies were updated
+  * dependencies
+    * @aigne/observability-api bumped to 0.11.2-beta.5
+
 ## [1.63.0-beta.9](https://github.com/AIGNE-io/aigne-framework/compare/core-v1.63.0-beta.8...core-v1.63.0-beta.9) (2025-10-16)
 
 
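
The headline change in this release is the bug fix above: the external fastq dependency is replaced by a bundled, browser-compatible queue (see package/lib/*/utils/queue.js below). A minimal usage sketch of the promise API that module declares; the import path, task shape, and concurrency value are illustrative assumptions, not taken from the package:

// Hypothetical deep import shown for illustration; inside the package the module is consumed as "../utils/queue.js".
import * as fastq from "@aigne/core/lib/esm/utils/queue.js";

async function demo() {
  // promise(worker, concurrency) returns a queue whose push() resolves with the worker's result
  const queue = fastq.promise(async (task: { id: number }) => task.id * 2, 2);
  const doubled = await queue.push({ id: 21 }); // 42
  await queue.drained(); // resolves once every queued task has been processed
  return doubled;
}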
package/lib/cjs/agents/agent.js
CHANGED

package/lib/cjs/agents/ai-agent.js
CHANGED

@@ -1,14 +1,44 @@
 "use strict";
-var
-
-
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.AIAgent = exports.aiAgentOptionsSchema = exports.aiAgentToolChoiceSchema = exports.AIAgentToolChoice = exports.DEFAULT_OUTPUT_FILE_KEY = exports.DEFAULT_OUTPUT_KEY = void 0;
-const fastq_1 = __importDefault(require("fastq"));
 const zod_1 = require("zod");
 const prompt_builder_js_1 = require("../prompt/prompt-builder.js");
 const structured_stream_instructions_js_1 = require("../prompt/prompts/structured-stream-instructions.js");
 const template_js_1 = require("../prompt/template.js");
+const fastq = __importStar(require("../utils/queue.js"));
 const structured_stream_extractor_js_1 = require("../utils/structured-stream-extractor.js");
 const type_utils_js_1 = require("../utils/type-utils.js");
 const agent_js_1 = require("./agent.js");

@@ -319,7 +349,7 @@ class AIAgent extends agent_js_1.Agent {
         }
         const executedToolCalls = [];
         let error;
-        const queue =
+        const queue = fastq.promise(async ({ tool, call }) => {
             try {
                 // NOTE: should pass both arguments (model generated) and input (user provided) to the tool
                 const output = await this.invokeSkill(tool, { ...input, ...call.function.arguments }, options).catch((error) => {

package/lib/cjs/agents/chat-model.js
CHANGED

@@ -42,7 +42,7 @@ const type_utils_js_1 = require("../utils/type-utils.js");
 const agent_js_1 = require("./agent.js");
 const model_js_1 = require("./model.js");
 const CHAT_MODEL_DEFAULT_RETRY_OPTIONS = {
-    retries:
+    retries: 3,
     shouldRetry: async (error) => error instanceof StructuredOutputError || (await Promise.resolve().then(() => __importStar(require("is-network-error")))).default(error),
 };
 class StructuredOutputError extends Error {
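
The change above pins the default retry count to 3, and shouldRetry limits retries to structured-output or network failures. A rough sketch of how options of this shape can drive a retry loop; this is illustrative only, not the package's internal retry implementation:

async function withRetry<T>(
  fn: () => Promise<T>,
  options: { retries: number; shouldRetry: (error: unknown) => Promise<boolean> },
): Promise<T> {
  for (let attempt = 0; ; attempt++) {
    try {
      return await fn();
    } catch (error) {
      // give up after `retries` extra attempts or when the error is not retryable
      if (attempt >= options.retries || !(await options.shouldRetry(error))) throw error;
    }
  }
}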

package/lib/cjs/agents/team-agent.js
CHANGED

@@ -1,12 +1,45 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
 var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.TeamAgent = exports.DEFAULT_REFLECTION_MAX_ITERATIONS = exports.ProcessMode = void 0;
 const node_assert_1 = __importDefault(require("node:assert"));
-const fastq_1 = __importDefault(require("fastq"));
 const immer_1 = require("immer");
+const fastq = __importStar(require("../utils/queue.js"));
 const stream_utils_js_1 = require("../utils/stream-utils.js");
 const type_utils_js_1 = require("../utils/type-utils.js");
 const agent_js_1 = require("./agent.js");

@@ -201,7 +234,7 @@ class TeamAgent extends agent_js_1.Agent {
         arr = Array.isArray(arr) ? [...arr] : (0, type_utils_js_1.isNil)(arr) ? [arr] : [];
         const results = new Array(arr.length);
         let error;
-        const queue =
+        const queue = fastq.promise(async ({ item, index }) => {
             try {
                 if (!(0, type_utils_js_1.isRecord)(item))
                     throw new TypeError(`Expected ${String(key)} to be an object, got ${typeof item}`);

package/lib/cjs/utils/model-utils.js
CHANGED

@@ -5,5 +5,6 @@ function mergeUsage(...usages) {
     return {
         inputTokens: usages.reduce((acc, usage) => (usage ? acc + usage.inputTokens : acc), 0),
         outputTokens: usages.reduce((acc, usage) => (usage ? acc + usage.outputTokens : acc), 0),
+        aigneHubCredits: usages.reduce((acc, usage) => (usage?.aigneHubCredits ? acc + usage.aigneHubCredits : acc), 0),
     };
 }
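
The added reducer means mergeUsage now aggregates the new aigneHubCredits field alongside the token counts, skipping usages that do not report it. Illustrative example (values are made up):

// mergeUsage({ inputTokens: 10, outputTokens: 5, aigneHubCredits: 2 }, { inputTokens: 3, outputTokens: 1 })
// => { inputTokens: 13, outputTokens: 6, aigneHubCredits: 2 }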

package/lib/cjs/utils/queue.d.ts
ADDED

@@ -0,0 +1,57 @@
+/** biome-ignore-all lint/style/noNonNullAssertion lint/complexity/useOptionalChain: <!> */
+declare namespace fastq {
+    type worker<C, T = any, R = any> = (this: C, task: T, cb: fastq.done<R>) => void;
+    type asyncWorker<C, T = any, R = any> = (this: C, task: T) => Promise<R>;
+    type done<R = any> = (err: Error | null, result?: R) => void;
+    type errorHandler<T = any> = (err: Error, task: T) => void;
+    interface queue<T = any, R = any> {
+        /** Add a task at the end of the queue. `done(err, result)` will be called when the task was processed. */
+        push(task: T, done?: done<R>): void;
+        /** Add a task at the beginning of the queue. `done(err, result)` will be called when the task was processed. */
+        unshift(task: T, done?: done<R>): void;
+        /** Pause the processing of tasks. Currently worked tasks are not stopped. */
+        pause(): any;
+        /** Resume the processing of tasks. */
+        resume(): any;
+        running(): number;
+        /** Returns `false` if there are tasks being processed or waiting to be processed. `true` otherwise. */
+        idle(): boolean;
+        /** Returns the number of tasks waiting to be processed (in the queue). */
+        length(): number;
+        /** Returns all the tasks be processed (in the queue). Returns empty array when there are no tasks */
+        getQueue(): T[];
+        /** Removes all tasks waiting to be processed, and reset `drain` to an empty function. */
+        kill(): any;
+        /** Same than `kill` but the `drain` function will be called before reset to empty. */
+        killAndDrain(): any;
+        /** Set a global error handler. `handler(err, task)` will be called each time a task is completed, `err` will be not null if the task has thrown an error. */
+        error(handler: errorHandler<T>): void;
+        /** Property that returns the number of concurrent tasks that could be executed in parallel. It can be altered at runtime. */
+        concurrency: number;
+        /** Property that returns `true` when the queue is in a paused state. */
+        paused: boolean;
+        /** Function that will be called when the last item from the queue has been processed by a worker. It can be altered at runtime. */
+        drain(): any;
+        /** Function that will be called when the last item from the queue has been assigned to a worker. It can be altered at runtime. */
+        empty: () => void;
+        /** Function that will be called when the queue hits the concurrency limit. It can be altered at runtime. */
+        saturated: () => void;
+    }
+    interface queueAsPromised<T = any, R = any> extends queue<T, R> {
+        /** Add a task at the end of the queue. The returned `Promise` will be fulfilled (rejected) when the task is completed successfully (unsuccessfully). */
+        push(task: T): Promise<R>;
+        /** Add a task at the beginning of the queue. The returned `Promise` will be fulfilled (rejected) when the task is completed successfully (unsuccessfully). */
+        unshift(task: T): Promise<R>;
+        /** Wait for the queue to be drained. The returned `Promise` will be resolved when all tasks in the queue have been processed by a worker. */
+        drained(): Promise<void>;
+    }
+}
+declare function fastq<C, T = any, R = any>(context: C, worker: fastq.worker<C, T, R>, _concurrency: number): fastq.queue<T, R>;
+declare function fastq<C, T = any, R = any>(worker: fastq.worker<C, T, R>, _concurrency: number): fastq.queue<T, R>;
+declare namespace fastq {
+    var promise: typeof queueAsPromised;
+}
+declare function queueAsPromised<C, T = any, R = any>(context: C, worker: fastq.asyncWorker<C, T, R>, _concurrency: number): fastq.queueAsPromised<T, R>;
+declare function queueAsPromised<C, T = any, R = any>(worker: fastq.asyncWorker<C, T, R>, _concurrency: number): fastq.queueAsPromised<T, R>;
+export default fastq;
+export declare const promise: typeof queueAsPromised;
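
These declarations mirror fastq's public surface, so existing call sites keep compiling. A minimal callback-style sketch against this interface; the relative import path, task type, and concurrency value are illustrative assumptions:

import fastq from "./queue.js"; // illustrative path

const q = fastq((task: string, done) => {
  // worker: process one task, then signal completion with (error, result)
  done(null, task.toUpperCase());
}, 1);

q.drain = () => console.log("all tasks processed");
q.push("hello", (err, result) => {
  if (err) console.error(err);
  else console.log(result); // "HELLO"
});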

package/lib/cjs/utils/queue.js
ADDED

@@ -0,0 +1,318 @@
+"use strict";
+/** biome-ignore-all lint/style/noNonNullAssertion lint/complexity/useOptionalChain: <!> */
+// Cross-platform queue implementation compatible with Node.js and browser
+// Based on fastqueue library but implemented in TypeScript
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.promise = void 0;
+// Cross-platform nextTick implementation
+const nextTick = (() => {
+    if (typeof process !== "undefined" && process.nextTick) {
+        return process.nextTick;
+    }
+    if (typeof setImmediate !== "undefined") {
+        return setImmediate;
+    }
+    return (callback) => setTimeout(callback, 0);
+})();
+// Simple object pooling implementation to replace reusify
+function createObjectPool(factory) {
+    const pool = [];
+    return {
+        get() {
+            return pool.pop() || factory();
+        },
+        release(obj) {
+            pool.push(obj);
+        },
+    };
+}
+function noop() { }
+function createTask() {
+    const task = {
+        value: null,
+        callback: noop,
+        next: null,
+        release: noop,
+        context: null,
+        errorHandler: null,
+        worked: function worked(err, result) {
+            const callback = task.callback;
+            const errorHandler = task.errorHandler;
+            const val = task.value;
+            task.value = null;
+            task.callback = noop;
+            if (task.errorHandler && err) {
+                errorHandler(err, val);
+            }
+            callback.call(task.context, err, result);
+            task.release(task);
+        },
+    };
+    return task;
+}
+function fastq(contextOrWorker, workerOrConcurrency, _concurrency) {
+    let context = null;
+    let worker;
+    let concurrency;
+    if (typeof contextOrWorker === "function") {
+        context = null;
+        worker = contextOrWorker;
+        concurrency = workerOrConcurrency;
+    }
+    else {
+        context = contextOrWorker;
+        worker = workerOrConcurrency;
+        concurrency = _concurrency;
+    }
+    if (!(concurrency >= 1)) {
+        throw new Error("fastq concurrency must be equal to or greater than 1");
+    }
+    const cache = createObjectPool(() => createTask());
+    let queueHead = null;
+    let queueTail = null;
+    let _running = 0;
+    let errorHandler = null;
+    const self = {
+        push,
+        drain: noop,
+        saturated: noop,
+        pause,
+        paused: false,
+        get concurrency() {
+            return concurrency;
+        },
+        set concurrency(value) {
+            if (!(value >= 1)) {
+                throw new Error("fastq concurrency must be equal to or greater than 1");
+            }
+            concurrency = value;
+            if (self.paused)
+                return;
+            for (; queueHead && _running < concurrency;) {
+                _running++;
+                release();
+            }
+        },
+        running,
+        resume,
+        idle,
+        length,
+        getQueue,
+        unshift,
+        empty: noop,
+        kill,
+        killAndDrain,
+        error,
+    };
+    return self;
+    function running() {
+        return _running;
+    }
+    function pause() {
+        self.paused = true;
+    }
+    function length() {
+        let current = queueHead;
+        let counter = 0;
+        while (current) {
+            current = current.next;
+            counter++;
+        }
+        return counter;
+    }
+    function getQueue() {
+        let current = queueHead;
+        const tasks = [];
+        while (current) {
+            if (current.value !== null) {
+                tasks.push(current.value);
+            }
+            current = current.next;
+        }
+        return tasks;
+    }
+    function resume() {
+        if (!self.paused)
+            return;
+        self.paused = false;
+        for (; queueHead && _running < concurrency;) {
+            _running++;
+            release();
+        }
+    }
+    function idle() {
+        return _running === 0 && self.length() === 0;
+    }
+    function push(value, done) {
+        const current = cache.get();
+        current.context = context;
+        current.release = release;
+        current.value = value;
+        current.callback = done || noop;
+        current.errorHandler = errorHandler;
+        if (_running >= concurrency || self.paused) {
+            if (queueTail) {
+                queueTail.next = current;
+                queueTail = current;
+            }
+            else {
+                queueHead = current;
+                queueTail = current;
+                self.saturated();
+            }
+        }
+        else {
+            _running++;
+            worker.call(context, current.value, current.worked);
+        }
+    }
+    function unshift(value, done) {
+        const current = cache.get();
+        current.context = context;
+        current.release = release;
+        current.value = value;
+        current.callback = done || noop;
+        current.errorHandler = errorHandler;
+        if (_running >= concurrency || self.paused) {
+            if (queueHead) {
+                current.next = queueHead;
+                queueHead = current;
+            }
+            else {
+                queueHead = current;
+                queueTail = current;
+                self.saturated();
+            }
+        }
+        else {
+            _running++;
+            worker.call(context, current.value, current.worked);
+        }
+    }
+    function release(holder) {
+        if (holder) {
+            cache.release(holder);
+        }
+        const next = queueHead;
+        if (next) {
+            if (!self.paused) {
+                if (queueTail === queueHead) {
+                    queueTail = null;
+                }
+                queueHead = next.next;
+                next.next = null;
+                worker.call(context, next.value, next.worked);
+                if (queueTail === null) {
+                    self.empty();
+                }
+            }
+            else {
+                _running--;
+            }
+        }
+        else if (--_running === 0) {
+            self.drain();
+        }
+    }
+    function kill() {
+        queueHead = null;
+        queueTail = null;
+        self.drain = noop;
+    }
+    function killAndDrain() {
+        queueHead = null;
+        queueTail = null;
+        self.drain();
+        self.drain = noop;
+    }
+    function error(handler) {
+        errorHandler = handler;
+    }
+}
+function queueAsPromised(contextOrWorker, workerOrConcurrency, _concurrency) {
+    let context = null;
+    let worker;
+    let concurrency;
+    if (typeof contextOrWorker === "function") {
+        context = null;
+        worker = contextOrWorker;
+        concurrency = workerOrConcurrency;
+    }
+    else {
+        context = contextOrWorker;
+        worker = workerOrConcurrency;
+        concurrency = _concurrency;
+    }
+    function asyncWrapper(arg, cb) {
+        worker.call(context, arg).then((res) => cb(null, res), (err) => cb(err));
+    }
+    const queue = fastq(context, asyncWrapper, concurrency);
+    const pushCb = queue.push;
+    const unshiftCb = queue.unshift;
+    const promiseQueue = {
+        ...queue,
+        push,
+        unshift,
+        drained,
+    };
+    // Make sure drain property is properly connected
+    Object.defineProperty(promiseQueue, "drain", {
+        get: () => queue.drain,
+        set: (value) => {
+            queue.drain = value;
+        },
+    });
+    return promiseQueue;
+    function push(value) {
+        const p = new Promise((resolve, reject) => {
+            pushCb(value, (err, result) => {
+                if (err) {
+                    reject(err);
+                    return;
+                }
+                resolve(result);
+            });
+        });
+        // Fork the promise chain to prevent unhandled rejection
+        p.catch(noop);
+        return p;
+    }
+    function unshift(value) {
+        const p = new Promise((resolve, reject) => {
+            unshiftCb(value, (err, result) => {
+                if (err) {
+                    reject(err);
+                    return;
+                }
+                resolve(result);
+            });
+        });
+        // Fork the promise chain to prevent unhandled rejection
+        p.catch(noop);
+        return p;
+    }
+    function drained() {
+        return new Promise((resolve) => {
+            nextTick(() => {
+                if (queue.idle()) {
+                    resolve();
+                }
+                else {
+                    const previousDrain = queue.drain;
+                    queue.drain = () => {
+                        if (typeof previousDrain === "function" && previousDrain !== noop) {
+                            previousDrain();
+                        }
+                        resolve();
+                        queue.drain = previousDrain;
+                    };
+                }
+            });
+        });
+    }
+}
+// Add promise function to namespace
+fastq.promise = queueAsPromised;
+// Modern ESM exports
+exports.default = fastq;
+exports.promise = queueAsPromised;
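
Note that in promise mode push()/unshift() pre-attach a no-op catch, so a task rejection that nobody awaits cannot surface as an unhandled rejection; awaiting callers still receive it. A short sketch exercising that behaviour together with error() and drained(); the worker, values, and import path are illustrative assumptions:

import { promise as makeQueue } from "./queue.js"; // illustrative path

async function demo() {
  const q = makeQueue(async (n: number) => {
    if (n < 0) throw new Error("negative input");
    return n * n;
  }, 2);
  q.error((err, task) => console.warn("task failed:", task, err.message));
  const results = await Promise.allSettled([q.push(3), q.push(-1)]);
  // results[0]: fulfilled with 9; results[1]: rejected with "negative input"
  await q.drained(); // resolves once the queue is idle
  return results;
}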

package/lib/dts/utils/queue.d.ts
ADDED

@@ -0,0 +1,57 @@
+/** biome-ignore-all lint/style/noNonNullAssertion lint/complexity/useOptionalChain: <!> */
+declare namespace fastq {
+    type worker<C, T = any, R = any> = (this: C, task: T, cb: fastq.done<R>) => void;
+    type asyncWorker<C, T = any, R = any> = (this: C, task: T) => Promise<R>;
+    type done<R = any> = (err: Error | null, result?: R) => void;
+    type errorHandler<T = any> = (err: Error, task: T) => void;
+    interface queue<T = any, R = any> {
+        /** Add a task at the end of the queue. `done(err, result)` will be called when the task was processed. */
+        push(task: T, done?: done<R>): void;
+        /** Add a task at the beginning of the queue. `done(err, result)` will be called when the task was processed. */
+        unshift(task: T, done?: done<R>): void;
+        /** Pause the processing of tasks. Currently worked tasks are not stopped. */
+        pause(): any;
+        /** Resume the processing of tasks. */
+        resume(): any;
+        running(): number;
+        /** Returns `false` if there are tasks being processed or waiting to be processed. `true` otherwise. */
+        idle(): boolean;
+        /** Returns the number of tasks waiting to be processed (in the queue). */
+        length(): number;
+        /** Returns all the tasks be processed (in the queue). Returns empty array when there are no tasks */
+        getQueue(): T[];
+        /** Removes all tasks waiting to be processed, and reset `drain` to an empty function. */
+        kill(): any;
+        /** Same than `kill` but the `drain` function will be called before reset to empty. */
+        killAndDrain(): any;
+        /** Set a global error handler. `handler(err, task)` will be called each time a task is completed, `err` will be not null if the task has thrown an error. */
+        error(handler: errorHandler<T>): void;
+        /** Property that returns the number of concurrent tasks that could be executed in parallel. It can be altered at runtime. */
+        concurrency: number;
+        /** Property that returns `true` when the queue is in a paused state. */
+        paused: boolean;
+        /** Function that will be called when the last item from the queue has been processed by a worker. It can be altered at runtime. */
+        drain(): any;
+        /** Function that will be called when the last item from the queue has been assigned to a worker. It can be altered at runtime. */
+        empty: () => void;
+        /** Function that will be called when the queue hits the concurrency limit. It can be altered at runtime. */
+        saturated: () => void;
+    }
+    interface queueAsPromised<T = any, R = any> extends queue<T, R> {
+        /** Add a task at the end of the queue. The returned `Promise` will be fulfilled (rejected) when the task is completed successfully (unsuccessfully). */
+        push(task: T): Promise<R>;
+        /** Add a task at the beginning of the queue. The returned `Promise` will be fulfilled (rejected) when the task is completed successfully (unsuccessfully). */
+        unshift(task: T): Promise<R>;
+        /** Wait for the queue to be drained. The returned `Promise` will be resolved when all tasks in the queue have been processed by a worker. */
+        drained(): Promise<void>;
+    }
+}
+declare function fastq<C, T = any, R = any>(context: C, worker: fastq.worker<C, T, R>, _concurrency: number): fastq.queue<T, R>;
+declare function fastq<C, T = any, R = any>(worker: fastq.worker<C, T, R>, _concurrency: number): fastq.queue<T, R>;
+declare namespace fastq {
+    var promise: typeof queueAsPromised;
+}
+declare function queueAsPromised<C, T = any, R = any>(context: C, worker: fastq.asyncWorker<C, T, R>, _concurrency: number): fastq.queueAsPromised<T, R>;
+declare function queueAsPromised<C, T = any, R = any>(worker: fastq.asyncWorker<C, T, R>, _concurrency: number): fastq.queueAsPromised<T, R>;
+export default fastq;
+export declare const promise: typeof queueAsPromised;
package/lib/esm/agents/agent.js
CHANGED

package/lib/esm/agents/ai-agent.js
CHANGED

@@ -1,8 +1,8 @@
-import fastq from "fastq";
 import { z } from "zod";
 import { PromptBuilder } from "../prompt/prompt-builder.js";
 import { STRUCTURED_STREAM_INSTRUCTIONS } from "../prompt/prompts/structured-stream-instructions.js";
 import { AgentMessageTemplate, ToolMessageTemplate } from "../prompt/template.js";
+import * as fastq from "../utils/queue.js";
 import { ExtractMetadataTransform } from "../utils/structured-stream-extractor.js";
 import { checkArguments, isEmpty } from "../utils/type-utils.js";
 import { Agent, agentOptionsSchema, isAgentResponseDelta, } from "./agent.js";

package/lib/esm/agents/chat-model.js
CHANGED

@@ -6,7 +6,7 @@ import { checkArguments, isNil, omitByDeep } from "../utils/type-utils.js";
 import { agentOptionsSchema, } from "./agent.js";
 import { fileContentSchema, fileUnionContentSchema, localContentSchema, Model, urlContentSchema, } from "./model.js";
 const CHAT_MODEL_DEFAULT_RETRY_OPTIONS = {
-    retries:
+    retries: 3,
     shouldRetry: async (error) => error instanceof StructuredOutputError || (await import("is-network-error")).default(error),
 };
 export class StructuredOutputError extends Error {

package/lib/esm/agents/team-agent.js
CHANGED

@@ -1,6 +1,6 @@
 import assert from "node:assert";
-import fastq from "fastq";
 import { produce } from "immer";
+import * as fastq from "../utils/queue.js";
 import { mergeAgentResponseChunk } from "../utils/stream-utils.js";
 import { isEmpty, isNil, isRecord, omit } from "../utils/type-utils.js";
 import { Agent, agentProcessResultToObject, isAgentResponseDelta, } from "./agent.js";

package/lib/esm/utils/model-utils.js
CHANGED

@@ -2,5 +2,6 @@ export function mergeUsage(...usages) {
     return {
         inputTokens: usages.reduce((acc, usage) => (usage ? acc + usage.inputTokens : acc), 0),
         outputTokens: usages.reduce((acc, usage) => (usage ? acc + usage.outputTokens : acc), 0),
+        aigneHubCredits: usages.reduce((acc, usage) => (usage?.aigneHubCredits ? acc + usage.aigneHubCredits : acc), 0),
     };
 }

package/lib/esm/utils/queue.d.ts
ADDED

@@ -0,0 +1,57 @@
+/** biome-ignore-all lint/style/noNonNullAssertion lint/complexity/useOptionalChain: <!> */
+declare namespace fastq {
+    type worker<C, T = any, R = any> = (this: C, task: T, cb: fastq.done<R>) => void;
+    type asyncWorker<C, T = any, R = any> = (this: C, task: T) => Promise<R>;
+    type done<R = any> = (err: Error | null, result?: R) => void;
+    type errorHandler<T = any> = (err: Error, task: T) => void;
+    interface queue<T = any, R = any> {
+        /** Add a task at the end of the queue. `done(err, result)` will be called when the task was processed. */
+        push(task: T, done?: done<R>): void;
+        /** Add a task at the beginning of the queue. `done(err, result)` will be called when the task was processed. */
+        unshift(task: T, done?: done<R>): void;
+        /** Pause the processing of tasks. Currently worked tasks are not stopped. */
+        pause(): any;
+        /** Resume the processing of tasks. */
+        resume(): any;
+        running(): number;
+        /** Returns `false` if there are tasks being processed or waiting to be processed. `true` otherwise. */
+        idle(): boolean;
+        /** Returns the number of tasks waiting to be processed (in the queue). */
+        length(): number;
+        /** Returns all the tasks be processed (in the queue). Returns empty array when there are no tasks */
+        getQueue(): T[];
+        /** Removes all tasks waiting to be processed, and reset `drain` to an empty function. */
+        kill(): any;
+        /** Same than `kill` but the `drain` function will be called before reset to empty. */
+        killAndDrain(): any;
+        /** Set a global error handler. `handler(err, task)` will be called each time a task is completed, `err` will be not null if the task has thrown an error. */
+        error(handler: errorHandler<T>): void;
+        /** Property that returns the number of concurrent tasks that could be executed in parallel. It can be altered at runtime. */
+        concurrency: number;
+        /** Property that returns `true` when the queue is in a paused state. */
+        paused: boolean;
+        /** Function that will be called when the last item from the queue has been processed by a worker. It can be altered at runtime. */
+        drain(): any;
+        /** Function that will be called when the last item from the queue has been assigned to a worker. It can be altered at runtime. */
+        empty: () => void;
+        /** Function that will be called when the queue hits the concurrency limit. It can be altered at runtime. */
+        saturated: () => void;
+    }
+    interface queueAsPromised<T = any, R = any> extends queue<T, R> {
+        /** Add a task at the end of the queue. The returned `Promise` will be fulfilled (rejected) when the task is completed successfully (unsuccessfully). */
+        push(task: T): Promise<R>;
+        /** Add a task at the beginning of the queue. The returned `Promise` will be fulfilled (rejected) when the task is completed successfully (unsuccessfully). */
+        unshift(task: T): Promise<R>;
+        /** Wait for the queue to be drained. The returned `Promise` will be resolved when all tasks in the queue have been processed by a worker. */
+        drained(): Promise<void>;
+    }
+}
+declare function fastq<C, T = any, R = any>(context: C, worker: fastq.worker<C, T, R>, _concurrency: number): fastq.queue<T, R>;
+declare function fastq<C, T = any, R = any>(worker: fastq.worker<C, T, R>, _concurrency: number): fastq.queue<T, R>;
+declare namespace fastq {
+    var promise: typeof queueAsPromised;
+}
+declare function queueAsPromised<C, T = any, R = any>(context: C, worker: fastq.asyncWorker<C, T, R>, _concurrency: number): fastq.queueAsPromised<T, R>;
+declare function queueAsPromised<C, T = any, R = any>(worker: fastq.asyncWorker<C, T, R>, _concurrency: number): fastq.queueAsPromised<T, R>;
+export default fastq;
+export declare const promise: typeof queueAsPromised;

package/lib/esm/utils/queue.js
ADDED

@@ -0,0 +1,315 @@
+/** biome-ignore-all lint/style/noNonNullAssertion lint/complexity/useOptionalChain: <!> */
+// Cross-platform queue implementation compatible with Node.js and browser
+// Based on fastqueue library but implemented in TypeScript
+// Cross-platform nextTick implementation
+const nextTick = (() => {
+    if (typeof process !== "undefined" && process.nextTick) {
+        return process.nextTick;
+    }
+    if (typeof setImmediate !== "undefined") {
+        return setImmediate;
+    }
+    return (callback) => setTimeout(callback, 0);
+})();
+// Simple object pooling implementation to replace reusify
+function createObjectPool(factory) {
+    const pool = [];
+    return {
+        get() {
+            return pool.pop() || factory();
+        },
+        release(obj) {
+            pool.push(obj);
+        },
+    };
+}
+function noop() { }
+function createTask() {
+    const task = {
+        value: null,
+        callback: noop,
+        next: null,
+        release: noop,
+        context: null,
+        errorHandler: null,
+        worked: function worked(err, result) {
+            const callback = task.callback;
+            const errorHandler = task.errorHandler;
+            const val = task.value;
+            task.value = null;
+            task.callback = noop;
+            if (task.errorHandler && err) {
+                errorHandler(err, val);
+            }
+            callback.call(task.context, err, result);
+            task.release(task);
+        },
+    };
+    return task;
+}
+function fastq(contextOrWorker, workerOrConcurrency, _concurrency) {
+    let context = null;
+    let worker;
+    let concurrency;
+    if (typeof contextOrWorker === "function") {
+        context = null;
+        worker = contextOrWorker;
+        concurrency = workerOrConcurrency;
+    }
+    else {
+        context = contextOrWorker;
+        worker = workerOrConcurrency;
+        concurrency = _concurrency;
+    }
+    if (!(concurrency >= 1)) {
+        throw new Error("fastq concurrency must be equal to or greater than 1");
+    }
+    const cache = createObjectPool(() => createTask());
+    let queueHead = null;
+    let queueTail = null;
+    let _running = 0;
+    let errorHandler = null;
+    const self = {
+        push,
+        drain: noop,
+        saturated: noop,
+        pause,
+        paused: false,
+        get concurrency() {
+            return concurrency;
+        },
+        set concurrency(value) {
+            if (!(value >= 1)) {
+                throw new Error("fastq concurrency must be equal to or greater than 1");
+            }
+            concurrency = value;
+            if (self.paused)
+                return;
+            for (; queueHead && _running < concurrency;) {
+                _running++;
+                release();
+            }
+        },
+        running,
+        resume,
+        idle,
+        length,
+        getQueue,
+        unshift,
+        empty: noop,
+        kill,
+        killAndDrain,
+        error,
+    };
+    return self;
+    function running() {
+        return _running;
+    }
+    function pause() {
+        self.paused = true;
+    }
+    function length() {
+        let current = queueHead;
+        let counter = 0;
+        while (current) {
+            current = current.next;
+            counter++;
+        }
+        return counter;
+    }
+    function getQueue() {
+        let current = queueHead;
+        const tasks = [];
+        while (current) {
+            if (current.value !== null) {
+                tasks.push(current.value);
+            }
+            current = current.next;
+        }
+        return tasks;
+    }
+    function resume() {
+        if (!self.paused)
+            return;
+        self.paused = false;
+        for (; queueHead && _running < concurrency;) {
+            _running++;
+            release();
+        }
+    }
+    function idle() {
+        return _running === 0 && self.length() === 0;
+    }
+    function push(value, done) {
+        const current = cache.get();
+        current.context = context;
+        current.release = release;
+        current.value = value;
+        current.callback = done || noop;
+        current.errorHandler = errorHandler;
+        if (_running >= concurrency || self.paused) {
+            if (queueTail) {
+                queueTail.next = current;
+                queueTail = current;
+            }
+            else {
+                queueHead = current;
+                queueTail = current;
+                self.saturated();
+            }
+        }
+        else {
+            _running++;
+            worker.call(context, current.value, current.worked);
+        }
+    }
+    function unshift(value, done) {
+        const current = cache.get();
+        current.context = context;
+        current.release = release;
+        current.value = value;
+        current.callback = done || noop;
+        current.errorHandler = errorHandler;
+        if (_running >= concurrency || self.paused) {
+            if (queueHead) {
+                current.next = queueHead;
+                queueHead = current;
+            }
+            else {
+                queueHead = current;
+                queueTail = current;
+                self.saturated();
+            }
+        }
+        else {
+            _running++;
+            worker.call(context, current.value, current.worked);
+        }
+    }
+    function release(holder) {
+        if (holder) {
+            cache.release(holder);
+        }
+        const next = queueHead;
+        if (next) {
+            if (!self.paused) {
+                if (queueTail === queueHead) {
+                    queueTail = null;
+                }
+                queueHead = next.next;
+                next.next = null;
+                worker.call(context, next.value, next.worked);
+                if (queueTail === null) {
+                    self.empty();
+                }
+            }
+            else {
+                _running--;
+            }
+        }
+        else if (--_running === 0) {
+            self.drain();
+        }
+    }
+    function kill() {
+        queueHead = null;
+        queueTail = null;
+        self.drain = noop;
+    }
+    function killAndDrain() {
+        queueHead = null;
+        queueTail = null;
+        self.drain();
+        self.drain = noop;
+    }
+    function error(handler) {
+        errorHandler = handler;
+    }
+}
+function queueAsPromised(contextOrWorker, workerOrConcurrency, _concurrency) {
+    let context = null;
+    let worker;
+    let concurrency;
+    if (typeof contextOrWorker === "function") {
+        context = null;
+        worker = contextOrWorker;
+        concurrency = workerOrConcurrency;
+    }
+    else {
+        context = contextOrWorker;
+        worker = workerOrConcurrency;
+        concurrency = _concurrency;
+    }
+    function asyncWrapper(arg, cb) {
+        worker.call(context, arg).then((res) => cb(null, res), (err) => cb(err));
+    }
+    const queue = fastq(context, asyncWrapper, concurrency);
+    const pushCb = queue.push;
+    const unshiftCb = queue.unshift;
+    const promiseQueue = {
+        ...queue,
+        push,
+        unshift,
+        drained,
+    };
+    // Make sure drain property is properly connected
+    Object.defineProperty(promiseQueue, "drain", {
+        get: () => queue.drain,
+        set: (value) => {
+            queue.drain = value;
+        },
+    });
+    return promiseQueue;
+    function push(value) {
+        const p = new Promise((resolve, reject) => {
+            pushCb(value, (err, result) => {
+                if (err) {
+                    reject(err);
+                    return;
+                }
+                resolve(result);
+            });
+        });
+        // Fork the promise chain to prevent unhandled rejection
+        p.catch(noop);
+        return p;
+    }
+    function unshift(value) {
+        const p = new Promise((resolve, reject) => {
+            unshiftCb(value, (err, result) => {
+                if (err) {
+                    reject(err);
+                    return;
+                }
+                resolve(result);
+            });
+        });
+        // Fork the promise chain to prevent unhandled rejection
+        p.catch(noop);
+        return p;
+    }
+    function drained() {
+        return new Promise((resolve) => {
+            nextTick(() => {
+                if (queue.idle()) {
+                    resolve();
+                }
+                else {
+                    const previousDrain = queue.drain;
+                    queue.drain = () => {
+                        if (typeof previousDrain === "function" && previousDrain !== noop) {
+                            previousDrain();
+                        }
+                        resolve();
+                        queue.drain = previousDrain;
+                    };
+                }
+            });
+        });
+    }
+}
+// Add promise function to namespace
+fastq.promise = queueAsPromised;
+// Modern ESM exports
+export default fastq;
+export const promise = queueAsPromised;
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@aigne/core",
-  "version": "1.63.0
+  "version": "1.63.0",
   "description": "The functional core of agentic AI",
   "publishConfig": {
     "access": "public"

@@ -77,7 +77,6 @@
     "debug": "^4.4.3",
     "eventsource-parser": "^3.0.6",
     "fast-deep-equal": "^3.1.3",
-    "fastq": "^1.19.1",
     "immer": "^10.1.3",
     "is-network-error": "^1.2.0",
     "jaison": "^2.0.2",

@@ -92,8 +91,8 @@
     "zod": "^3.25.67",
     "zod-from-json-schema": "^0.0.5",
     "zod-to-json-schema": "^3.24.6",
-    "@aigne/afs": "^1.1.0
-    "@aigne/observability-api": "^0.11.2
+    "@aigne/afs": "^1.1.0",
+    "@aigne/observability-api": "^0.11.2",
     "@aigne/platform-helpers": "^0.6.3"
   },
   "devDependencies": {