@langchain/langgraph-api 0.0.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +3 -0
- package/dist/api/assistants.mjs +144 -0
- package/dist/api/runs.mjs +239 -0
- package/dist/api/store.mjs +83 -0
- package/dist/api/threads.mjs +143 -0
- package/dist/cli/entrypoint.mjs +42 -0
- package/dist/cli/spawn.d.mts +14 -0
- package/dist/cli/spawn.mjs +34 -0
- package/dist/cli/utils/ipc/client.mjs +47 -0
- package/dist/cli/utils/ipc/utils/get-pipe-path.mjs +29 -0
- package/dist/cli/utils/ipc/utils/temporary-directory.mjs +40 -0
- package/dist/graph/load.hooks.mjs +17 -0
- package/dist/graph/load.mjs +72 -0
- package/dist/graph/load.utils.mjs +50 -0
- package/dist/graph/parser/parser.mjs +309 -0
- package/dist/graph/parser/parser.worker.mjs +7 -0
- package/dist/graph/parser/schema/types.mjs +1607 -0
- package/dist/graph/parser/schema/types.template.mts +83 -0
- package/dist/logging.mjs +100 -0
- package/dist/preload.mjs +3 -0
- package/dist/queue.mjs +93 -0
- package/dist/schemas.mjs +407 -0
- package/dist/server.mjs +74 -0
- package/dist/state.mjs +32 -0
- package/dist/storage/checkpoint.mjs +127 -0
- package/dist/storage/importMap.mjs +55 -0
- package/dist/storage/ops.mjs +792 -0
- package/dist/storage/persist.mjs +78 -0
- package/dist/storage/store.mjs +41 -0
- package/dist/stream.mjs +215 -0
- package/dist/utils/abort.mjs +8 -0
- package/dist/utils/hono.mjs +27 -0
- package/dist/utils/importMap.mjs +55 -0
- package/dist/utils/runnableConfig.mjs +45 -0
- package/dist/utils/serde.mjs +20 -0
- package/package.json +56 -0
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
import type { BaseMessage } from "@langchain/core/messages";
import type {
  StateType,
  UpdateType,
  StateDefinition,
} from "@langchain/langgraph";
import type { Graph } from "@langchain/langgraph";
import type { Pregel } from "@langchain/langgraph/pregel";

// NOTE(review): the `// @ts-expect-error` markers below look intentional —
// presumably they suppress diagnostics (e.g. unused-type errors) for aliases
// that are only referenced once this template is stitched into generated
// schema code. TODO confirm before removing any of them. Each marker must
// stay immediately above the line it applies to.

// @ts-expect-error
type AnyPregel = Pregel<any, any>;

// @ts-expect-error
type AnyGraph = Graph<any, any, any, any, any>;

// Places T in a contravariant position so that `Wrap<A> extends Wrap<B>`
// holds only when A is assignable to B, avoiding the covariant widening
// a bare `A extends B` check would allow.
type Wrap<T> = (a: T) => void;

// BaseMessage when T is (a subtype of) BaseMessage, otherwise never.
type MatchBaseMessage<T> = T extends BaseMessage ? BaseMessage : never;

// BaseMessage[] when T is an array whose element type matches BaseMessage
// (checked through Wrap for strictness), otherwise never.
type MatchBaseMessageArray<T> =
  T extends Array<infer C>
    ? Wrap<MatchBaseMessage<C>> extends Wrap<BaseMessage>
      ? BaseMessage[]
      : never
    : never;

// Unwraps factory exports: a graph module may export either a value or a
// (possibly async) factory returning one; resolve to the awaited result.
type Defactorify<T> = T extends (...args: any[]) => infer R
  ? Awaited<R>
  : Awaited<T>;

// Recursively rewrites an object type, collapsing BaseMessage-like
// properties (and arrays thereof) to the base class. The `0 extends 1 & T[K]`
// trick detects `any` and leaves such properties untouched.
// @ts-expect-error
type Inspect<T> = T extends unknown
  ? {
      [K in keyof T]: 0 extends 1 & T[K]
        ? T[K]
        : Wrap<MatchBaseMessageArray<T[K]>> extends Wrap<BaseMessage[]>
          ? BaseMessage[]
          : Wrap<MatchBaseMessage<T[K]>> extends Wrap<BaseMessage>
            ? BaseMessage
            : Inspect<T[K]>;
    }
  : never;

// Extracts { state, update } from a compiled graph, supporting both the
// RunInput/RunOutput shape and the "~InputType"/"~OutputType" phantom fields.
type ReflectCompiled<T> = T extends { RunInput: infer S; RunOutput: infer U }
  ? { state: S; update: U }
  : T extends { "~InputType": infer InputType; "~OutputType": infer OutputType }
    ? { state: OutputType; update: InputType }
    : never;

// Reflects state/update types from an export that may be a factory, an
// uncompiled builder (exposes `.compile()`), or an already-compiled graph.
// @ts-expect-error
type Reflect<T> =
  Defactorify<T> extends infer DT
    ? DT extends {
        compile(...args: any[]): infer Compiled;
      }
      ? ReflectCompiled<Compiled>
      : ReflectCompiled<DT>
    : never;

// Extracts input/output/config types from a compiled graph via its builder's
// underscore-prefixed internal definitions.
type BuilderReflectCompiled<T> = T extends {
  builder: {
    _inputDefinition: infer I extends StateDefinition;
    _outputDefinition: infer O extends StateDefinition;
    _configSchema?: infer C extends StateDefinition | undefined;
  };
}
  ? {
      input: UpdateType<I>;
      output: StateType<O>;
      config: UpdateType<Exclude<C, undefined>>;
    }
  : never;

// Same as Reflect, but reads the builder's state definitions instead of the
// run input/output phantom types.
// @ts-expect-error
type BuilderReflect<T> =
  Defactorify<T> extends infer DT
    ? DT extends {
        compile(...args: any[]): infer Compiled;
      }
      ? BuilderReflectCompiled<Compiled>
      : BuilderReflectCompiled<DT>
    : never;

// Maps `any` to never so downstream conditionals can skip untyped slots.
// @ts-expect-error
type FilterAny<T> = 0 extends 1 & T ? never : T;
|
package/dist/logging.mjs
ADDED
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
import { createLogger, format, transports } from "winston";
import { logger as honoLogger } from "hono/logger";
import { consoleFormat } from "winston-console-format";
import { parse as stacktraceParser } from "stacktrace-parser";
import { readFileSync } from "fs";
import { codeFrameColumns } from "@babel/code-frame";
import path from "node:path";
// LOG_JSON=true switches output to line-delimited JSON (for log collectors);
// any other value yields a colorized human-readable console format.
const LOG_JSON = process.env.LOG_JSON === "true";
const LOG_LEVEL = process.env.LOG_LEVEL || "debug";
// Shared winston logger. The base pipeline always captures error stacks,
// timestamps, and JSON structure; the tail of the pipeline then renders
// either pretty console output or compact JSON depending on LOG_JSON.
export const logger = createLogger({
    level: LOG_LEVEL,
    format: format.combine(format.errors({ stack: true }), format.timestamp(), format.json(), ...(!LOG_JSON
        ? [
            format.colorize({ all: true }),
            format.padLevels(),
            consoleFormat({
                showMeta: true,
                // timestamp already rendered inline; drop it from the meta dump
                metaStrip: ["timestamp"],
                inspectOptions: {
                    depth: Infinity,
                    colors: true,
                    maxArrayLength: Infinity,
                    breakLength: 120,
                    compact: Infinity,
                },
            }),
        ]
        : [
            // JSON mode: emit one object per line with a normalized `event`
            // field; if a stack is present it becomes the event and the
            // original message moves into `message`.
            format.printf((info) => {
                const { timestamp, level, message, ...rest } = info;
                let event;
                if (typeof message === "string") {
                    event = message;
                }
                else {
                    event = JSON.stringify(message);
                }
                if (rest.stack) {
                    rest.message = event;
                    event = rest.stack;
                }
                return JSON.stringify({ timestamp, level, event, ...rest });
            }),
        ])),
    transports: [new transports.Console()],
});
|
|
47
|
+
// Enrich a stack trace with a code frame pointing at the first frame that
// belongs to user code (i.e. not inside node_modules and not a node: builtin).
// Returns the stack untouched when it is empty, when no user frame is found,
// or when reading/rendering the source fails.
const formatStack = (stack) => {
    if (!stack)
        return stack;
    const [firstFile] = stacktraceParser(stack).filter((item) => !item.file?.split(path.sep).includes("node_modules") &&
        !item.file?.startsWith("node:"));
    if (firstFile?.file && firstFile?.lineNumber) {
        try {
            const filePath = firstFile.file;
            const line = firstFile.lineNumber;
            const column = firstFile.column ?? 0;
            const messageLines = stack.split("\n");
            // Find the "at ..." line for this frame and match its indentation
            // so the code frame lines up underneath it.
            const spliceIndex = messageLines.findIndex((i) => i.includes(filePath));
            const padding = " ".repeat(Math.max(0, messageLines[spliceIndex].indexOf("at")));
            // Only colorize when writing to a real terminal.
            const highlightCode = process.stdout.isTTY;
            let codeFrame = codeFrameColumns(readFileSync(filePath, "utf-8"), { start: { line, column } }, { highlightCode });
            // Reset colors at each line end so padding of following lines
            // is not tinted by a dangling escape sequence.
            codeFrame = codeFrame
                .split("\n")
                .map((i) => padding + i + "\x1b[0m")
                .join("\n");
            if (highlightCode) {
                // Cyan for the frame, then switch back to red so the rest of
                // the stack keeps its error coloring.
                codeFrame = "\x1b[36m" + codeFrame + "\x1b[31m";
            }
            // insert codeframe after the line but dont lose the stack
            return [
                ...messageLines.slice(0, spliceIndex + 1),
                codeFrame,
                ...messageLines.slice(spliceIndex + 1),
            ].join("\n");
        }
        catch {
            // pass — e.g. the source file may no longer exist on disk
        }
    }
    return stack;
};
|
|
82
|
+
/**
 * Log an arbitrary thrown value through the shared winston logger.
 *
 * Error instances get their stack enriched with a code frame (falling back
 * to `message`); non-Error values are stringified and the raw value is kept
 * in the log context under `error`. An optional `options.prefix` is
 * prepended on its own line, and `options.context` (when present) is passed
 * as structured metadata.
 */
export const logError = (error, options) => {
    const isError = error instanceof Error;
    let text = isError
        ? formatStack(error.stack) || error.message
        : String(error);
    const extras = isError
        ? options?.context
        : { ...options?.context, error };
    if (options?.prefix != null) {
        text = `${options.prefix}:\n${text}`;
    }
    logger.error(text, ...(extras != null ? [extras] : []));
};
|
|
96
|
+
// Last-resort handlers: surface otherwise-fatal errors through the logger
// instead of node's default stderr dump. Note these handlers do not exit
// the process.
process.on("uncaughtException", (error) => logError(error));
process.on("unhandledRejection", (error) => logError(error));
// Hono middleware that routes HTTP request/response log lines into winston.
export const requestLogger = () => honoLogger((message, ...rest) => {
    logger.info(message, ...rest);
});
|
package/dist/preload.mjs
ADDED
package/dist/queue.mjs
ADDED
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
import { Runs, Threads } from "./storage/ops.mjs";
import { streamState, } from "./stream.mjs";
import { logError, logger } from "./logging.mjs";
import { serializeError } from "./utils/serde.mjs";
// A run is considered permanently failed once its attempt counter exceeds
// this value (checked inside `worker`).
const MAX_RETRY_ATTEMPTS = 3;
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
|
|
7
|
+
/**
 * Background run consumer loop: drains all pending runs from storage,
 * executing each one sequentially via `worker`, then backs off for a short
 * randomized interval before polling again. Never returns.
 */
export const queue = async () => {
    for (;;) {
        for await (const item of Runs.next()) {
            await worker(item.run, item.attempt, item.signal);
        }
        // TODO: this is very suboptimal, we should implement subscription to the run
        await sleep(1000 * Math.random());
    }
};
|
|
16
|
+
// Execute one attempt of a background run end to end: stream its events to
// subscribers, record success or failure on the run, and finally update (or
// delete, for temporary runs) the owning thread.
const worker = async (run, attempt, abortSignal) => {
    const startedAt = new Date();
    // Latest checkpoint observed during streaming; used to derive the thread
    // status afterwards. Stays undefined for temporary runs.
    let checkpoint = undefined;
    let exception = undefined;
    const temporary = run.kwargs.temporary;
    logger.info("Starting background run", {
        run_id: run.run_id,
        run_attempt: attempt,
        run_created_at: run.created_at,
        run_started_at: startedAt,
        run_queue_ms: startedAt.valueOf() - run.created_at.valueOf(),
    });
    const onCheckpoint = (value) => {
        checkpoint = value;
    };
    // Merge a finished task's result into the matching task of the latest
    // checkpoint snapshot.
    const onTaskResult = (result) => {
        if (checkpoint == null)
            return;
        const index = checkpoint.tasks.findIndex((task) => task.id === result.id);
        checkpoint.tasks[index] = {
            ...checkpoint.tasks[index],
            ...result,
        };
    };
    try {
        if (attempt > MAX_RETRY_ATTEMPTS) {
            throw new Error(`Run ${run.run_id} exceeded max attempts`);
        }
        try {
            const stream = streamState(run, attempt, {
                signal: abortSignal,
                // Temporary runs skip checkpoint tracking entirely — their
                // thread is deleted in the finally block below.
                ...(!temporary ? { onCheckpoint, onTaskResult } : undefined),
            });
            for await (const { event, data } of stream) {
                await Runs.Stream.publish(run.run_id, event, data);
            }
        }
        catch (error) {
            // Forward the failure to stream subscribers before rethrowing so
            // the outer handler can still record the run as errored.
            await Runs.Stream.publish(run.run_id, "error", serializeError(error));
            throw error;
        }
        const endedAt = new Date();
        logger.info("Background run succeeded", {
            run_id: run.run_id,
            run_attempt: attempt,
            run_created_at: run.created_at,
            run_started_at: startedAt,
            run_ended_at: endedAt,
            run_exec_ms: endedAt.valueOf() - startedAt.valueOf(),
        });
        await Runs.setStatus(run.run_id, "success");
    }
    catch (error) {
        const endedAt = new Date();
        if (error instanceof Error)
            exception = error;
        logError(error, {
            prefix: "Background run failed",
            context: {
                run_id: run.run_id,
                run_attempt: attempt,
                run_created_at: run.created_at,
                run_started_at: startedAt,
                run_ended_at: endedAt,
                run_exec_ms: endedAt.valueOf() - startedAt.valueOf(),
            },
        });
        await Runs.setStatus(run.run_id, "error");
    }
    finally {
        // Temporary runs own their thread: drop it once finished. Otherwise
        // derive the thread status from the last checkpoint / exception.
        if (temporary) {
            await Threads.delete(run.thread_id);
        }
        else {
            await Threads.setStatus(run.thread_id, { checkpoint, exception });
        }
    }
};
|
package/dist/schemas.mjs
ADDED
|
@@ -0,0 +1,407 @@
|
|
|
1
|
+
import { z } from "zod";
// Zod schemas for the HTTP API payloads. Field names use snake_case to
// match the wire format of the LangGraph API.
// Assistant-scoped `configurable` values; open-ended beyond the known keys.
export const AssistantConfigurable = z
    .object({
        thread_id: z.string().optional(),
        thread_ts: z.string().optional(),
    })
    .catchall(z.unknown());
export const AssistantConfig = z
    .object({
        tags: z.array(z.string()).optional(),
        recursion_limit: z.number().int().optional(),
        configurable: AssistantConfigurable.optional(),
    })
    .catchall(z.unknown())
    .describe("The configuration of an assistant.");
// A stored assistant record as returned by the API.
export const Assistant = z.object({
    assistant_id: z.string().uuid(),
    graph_id: z.string(),
    config: AssistantConfig,
    created_at: z.string(),
    updated_at: z.string(),
    metadata: z.object({}).catchall(z.any()),
});
export const AssistantCreate = z
    .object({
        assistant_id: z
            .string()
            .uuid()
            .describe("The ID of the assistant. If not provided, an ID is generated.")
            .optional(),
        graph_id: z.string().describe("The graph to use."),
        config: AssistantConfig.optional(),
        metadata: z
            .object({})
            .catchall(z.unknown())
            .describe("Metadata for the assistant.")
            .optional(),
        if_exists: z
            .union([z.literal("raise"), z.literal("do_nothing")])
            .optional(),
        name: z.string().optional(),
    })
    .describe("Payload for creating an assistant.");
export const AssistantPatch = z
    .object({
        graph_id: z.string().describe("The graph to use.").optional(),
        config: AssistantConfig.optional(),
        metadata: z
            .object({})
            .catchall(z.any())
            .describe("Metadata to merge with existing assistant metadata.")
            .optional(),
    })
    .describe("Payload for updating an assistant.");
// Generic runnable config (unlike AssistantConfig, `configurable` is free-form).
export const Config = z.object({
    tags: z.array(z.string()).optional(),
    recursion_limit: z.number().int().optional(),
    configurable: z.object({}).catchall(z.any()).optional(),
});
// A stored cron record.
export const Cron = z.object({
    cron_id: z.string().uuid(),
    thread_id: z.string().uuid(),
    end_time: z.string(),
    schedule: z.string(),
    created_at: z.string(),
    updated_at: z.string(),
    payload: z.object({}).catchall(z.any()),
});
// Checkpoint reference used to address a specific point in a thread's history.
export const CheckpointSchema = z.object({
    checkpoint_id: z.string().uuid().optional(),
    checkpoint_ns: z.string().nullish(),
    checkpoint_map: z.record(z.unknown()).nullish(),
});
export const CronCreate = z
    .object({
        thread_id: z.string().uuid(),
        assistant_id: z.string().uuid(),
        checkpoint_id: z.string().optional(),
        input: z
            .union([
                z.array(z.object({}).catchall(z.any())),
                z.object({}).catchall(z.any()),
            ])
            .optional(),
        metadata: z
            .object({})
            .catchall(z.any())
            .describe("Metadata for the run.")
            .optional(),
        config: AssistantConfig.optional(),
        webhook: z.string().url().optional(),
        // "*" means interrupt at every node; otherwise a list of node names.
        interrupt_before: z.union([z.enum(["*"]), z.array(z.string())]).optional(),
        interrupt_after: z.union([z.enum(["*"]), z.array(z.string())]).optional(),
        multitask_strategy: z
            .enum(["reject", "rollback", "interrupt", "enqueue"])
            .optional(),
    })
    .describe("Payload for creating a cron.");
export const CronSearch = z
    .object({
        assistant_id: z.string().uuid().optional(),
        thread_id: z.string().uuid().optional(),
        limit: z
            .number()
            .int()
            .gte(1)
            .lte(1000)
            .describe("Maximum number to return.")
            .optional(),
        offset: z
            .number()
            .int()
            .gte(0)
            .describe("Offset to start from.")
            .optional(),
    })
    .describe("Payload for listing crons");
// JSON-schema descriptions of a graph's input/output/state/config.
export const GraphSchema = z.object({
    graph_id: z.string(),
    input_schema: z.object({}).catchall(z.any()).optional(),
    output_schema: z.object({}).catchall(z.any()).optional(),
    state_schema: z.object({}).catchall(z.any()),
    config_schema: z.object({}).catchall(z.any()),
});
// A stored run record.
export const Run = z.object({
    run_id: z.string().uuid(),
    thread_id: z.string().uuid(),
    assistant_id: z.string().uuid(),
    created_at: z.string(),
    updated_at: z.string(),
    status: z.enum([
        "pending",
        "running",
        "error",
        "success",
        "timeout",
        "interrupted",
    ]),
    metadata: z.object({}).catchall(z.any()),
    kwargs: z.object({}).catchall(z.any()),
    multitask_strategy: z.enum(["reject", "rollback", "interrupt", "enqueue"]),
});
export const RunCreate = z
    .object({
        // Accepts either an assistant UUID or a graph name.
        assistant_id: z.union([z.string().uuid(), z.string()]),
        checkpoint_id: z.string().optional(),
        checkpoint: CheckpointSchema.optional(),
        input: z.union([z.unknown(), z.null()]).optional(),
        // Command to resume/steer an interrupted run instead of plain input.
        command: z
            .object({
                goto: z
                    .union([
                        z.union([
                            z.string(),
                            z.object({ node: z.string(), input: z.unknown().optional() }),
                        ]),
                        z.array(z.union([
                            z.string(),
                            z.object({ node: z.string(), input: z.unknown().optional() }),
                        ])),
                    ])
                    .optional(),
                update: z
                    .union([
                        z.record(z.unknown()),
                        z.array(z.tuple([z.string(), z.unknown()])),
                    ])
                    .optional(),
                resume: z.unknown().optional(),
            })
            .optional(),
        metadata: z
            .object({})
            .catchall(z.any())
            .describe("Metadata for the run.")
            .optional(),
        config: AssistantConfig.optional(),
        webhook: z.string().url().optional(),
        interrupt_before: z.union([z.enum(["*"]), z.array(z.string())]).optional(),
        interrupt_after: z.union([z.enum(["*"]), z.array(z.string())]).optional(),
        on_disconnect: z
            .enum(["cancel", "continue"])
            .optional()
            .default("continue"),
        multitask_strategy: z
            .enum(["reject", "rollback", "interrupt", "enqueue"])
            .optional(),
        stream_mode: z
            .union([
                z.array(z.enum([
                    "values",
                    "messages",
                    "messages-tuple",
                    "updates",
                    "events",
                    "debug",
                ])),
                z.enum([
                    "values",
                    "messages",
                    "messages-tuple",
                    "updates",
                    "events",
                    "debug",
                ]),
            ])
            .optional(),
        stream_subgraphs: z.boolean().optional(),
        after_seconds: z.number().optional(),
        if_not_exists: z.enum(["reject", "create"]).optional(),
        on_completion: z.enum(["delete", "keep"]).optional(),
        feedback_keys: z.array(z.string()).optional(),
    })
    .describe("Payload for creating a stateful run.");
export const RunBatchCreate = z
    .array(RunCreate)
    .min(1)
    .describe("Payload for creating a batch of runs.");
export const SearchResult = z
    .object({
        metadata: z
            .object({})
            .catchall(z.any())
            .describe("Metadata to search for.")
            .optional(),
        limit: z
            .number()
            .int()
            .gte(1)
            .lte(1000)
            .describe("Maximum number to return.")
            .optional(),
        offset: z
            .number()
            .int()
            .gte(0)
            .describe("Offset to start from.")
            .optional(),
    })
    .describe("Payload for listing runs.");
export const AssistantSearchRequest = z
    .object({
        metadata: z
            .object({})
            .catchall(z.any())
            .describe("Metadata to search for.")
            .optional(),
        graph_id: z.string().describe("Filter by graph ID.").optional(),
        limit: z
            .number()
            .int()
            .gte(1)
            .lte(1000)
            .describe("Maximum number to return.")
            .optional(),
        offset: z
            .number()
            .int()
            .gte(0)
            .describe("Offset to start from.")
            .optional(),
    })
    .describe("Payload for listing assistants.");
export const ThreadSearchRequest = z
    .object({
        metadata: z
            .record(z.unknown())
            .describe("Metadata to search for.")
            .optional(),
        status: z
            .enum(["idle", "busy", "interrupted", "error"])
            .describe("Filter by thread status.")
            .optional(),
        values: z
            .record(z.unknown())
            .describe("Filter by thread values.")
            .optional(),
        limit: z
            .number()
            .int()
            .gte(1)
            .lte(1000)
            .describe("Maximum number to return.")
            .optional(),
        offset: z
            .number()
            .int()
            .gte(0)
            .describe("Offset to start from.")
            .optional(),
    })
    .describe("Payload for listing threads.");
// A stored thread record.
export const Thread = z.object({
    thread_id: z.string().uuid(),
    created_at: z.string(),
    updated_at: z.string(),
    metadata: z.record(z.unknown()).optional(),
    status: z.enum(["idle", "busy", "interrupted", "error"]).optional(),
});
export const ThreadCreate = z
    .object({
        thread_id: z
            .string()
            .uuid()
            .describe("The ID of the thread. If not provided, an ID is generated.")
            .optional(),
        metadata: z
            .object({})
            .catchall(z.any())
            .describe("Metadata for the thread.")
            .optional(),
        if_exists: z
            .union([z.literal("raise"), z.literal("do_nothing")])
            .optional(),
    })
    .describe("Payload for creating a thread.");
export const ThreadPatch = z
    .object({
        metadata: z
            .object({})
            .catchall(z.any())
            .describe("Metadata to merge with existing thread metadata.")
            .optional(),
    })
    .describe("Payload for patching a thread.");
// Snapshot of a thread's state at one checkpoint.
export const ThreadState = z.object({
    values: z.union([
        z.array(z.object({}).catchall(z.any())),
        z.object({}).catchall(z.any()),
    ]),
    next: z.array(z.string()),
    checkpoint_id: z.string(),
    metadata: z.object({}).catchall(z.any()),
    created_at: z.string(),
    parent_checkpoint_id: z.string(),
});
export const ThreadStatePatch = z
    .object({ metadata: z.object({}).catchall(z.any()) })
    .describe("Payload for patching state of a thread.");
export const ThreadStateSearch = z.object({
    limit: z
        .number()
        .int()
        .gte(1)
        .lte(1000)
        .describe("The maximum number of states to return.")
        .optional(),
    before: z
        .string()
        .describe("Return states before this checkpoint ID.")
        .optional(),
    metadata: z
        .object({})
        .catchall(z.any())
        .describe("Filter states by metadata key-value pairs.")
        .optional(),
});
export const ThreadStateUpdate = z
    .object({
        values: z
            .union([
                z.array(z.object({}).catchall(z.any())),
                z.object({}).catchall(z.any()),
                z.null(),
            ])
            .optional(),
        checkpoint_id: z.string().optional(),
        as_node: z.string().optional(),
    })
    .describe("Payload for adding state to a thread.");
export const AssistantLatestVersion = z.object({
    version: z.number(),
});
// Store (key-value document) API payloads.
export const StoreListNamespaces = z.object({
    prefix: z.array(z.string()).optional(),
    suffix: z.array(z.string()).optional(),
    max_depth: z.number().optional(),
    limit: z.number().default(100).optional(),
    offset: z.number().default(0).optional(),
});
export const StoreSearchItems = z.object({
    namespace_prefix: z.array(z.string()),
    filter: z.record(z.unknown()).optional(),
    limit: z.number().default(10).optional(),
    offset: z.number().default(0).optional(),
    query: z.string().optional(),
});
export const StorePutItem = z.object({
    namespace: z.array(z.string()),
    key: z.string(),
    value: z.record(z.unknown()),
});
export const StoreDeleteItem = z.object({
    namespace: z.array(z.string()).optional(),
    key: z.string(),
});
export const StoreGetItem = z.object({
    // Dotted namespace string is split into segments; absent → root ([]).
    namespace: z
        .string()
        .optional()
        .transform((value) => value?.split(".") ?? []),
    key: z.string(),
});
|
|
404
|
+
/**
 * String → boolean coercion for query/env-style flags: "true", "1" and
 * "yes" (any casing) become true; every other string becomes false.
 */
export const coercedBoolean = z
    .string()
    .transform((raw) => ["true", "1", "yes"].includes(raw.toLowerCase()));
|