@blocknote/xl-ai 0.39.1 → 0.39.2
This diff shows the published contents of the two package versions as they appear in their public registry. It is provided for informational purposes only.
- package/dist/webpack-stats.json +1 -1
- package/package.json +4 -4
- package/types/src/api/aiRequest/defaultAIRequestSender.d.ts +4 -0
- package/types/src/api/aiRequest/execute.d.ts +31 -0
- package/types/src/api/aiRequest/index.d.ts +3 -0
- package/types/src/api/aiRequest/types.d.ts +42 -0
- package/types/src/api/formats/index.d.ts +166 -0
- package/types/src/streamTool/StreamToolExecutor.d.ts +80 -0
- package/types/src/streamTool/index.d.ts +6 -0
- package/types/src/streamTool/toolDefinitionsToToolSet.d.ts +9 -0
- package/types/src/streamTool/vercelAiSdk/clientside/ClientSideTransport.d.ts +109 -0
- package/types/src/streamTool/vercelAiSdk/util/UIMessageStreamToOperationsResult.d.ts +29 -0
- package/types/src/streamTool/vercelAiSdk/util/chatHandlers.d.ts +23 -0
- package/types/src/streamTool/vercelAiSdk/util/partialObjectStreamUtil.d.ts +30 -0
- package/types/src/types.d.ts +56 -0

package/dist/webpack-stats.json
CHANGED
@@ -1 +1 @@
-{"builtAt":
+{"builtAt":1760024247466,"assets":[{"name":"blocknote-xl-ai.cjs","size":60375},{"name":"locales.cjs","size":23791},{"name":"locales.cjs.map","size":55283},{"name":"blocknote-xl-ai.cjs.map","size":2396837}],"chunks":[{"id":"a1ee98a","entry":true,"initial":true,"files":["blocknote-xl-ai.cjs"],"names":["blocknote-xl-ai"]},{"id":"842df4c","entry":true,"initial":true,"files":["locales.cjs"],"names":["locales"]}],"modules":[{"name":"./src/style.css","size":0,"chunks":["a1ee98a"]},{"name":"../../node_modules/.pnpm/zustand@5.0.3_@types+react@19.1.8_immer@10.1.1_react@19.1.0_use-sync-external-store@1.5.0_react@19.1.0_/node_modules/zustand/esm/vanilla.mjs","size":973,"chunks":["a1ee98a"]},{"name":"./src/streamTool/jsonSchema.ts","size":1194,"chunks":["a1ee98a"]},{"name":"./src/util/stream.ts","size":1069,"chunks":["a1ee98a"]},{"name":"./src/streamTool/filterNewOrUpdatedOperations.ts","size":891,"chunks":["a1ee98a"]},{"name":"./src/streamTool/filterValidOperations.ts","size":647,"chunks":["a1ee98a"]},{"name":"./src/streamTool/toValidatedOperations.ts","size":753,"chunks":["a1ee98a"]},{"name":"./src/streamTool/preprocess.ts","size":848,"chunks":["a1ee98a"]},{"name":"./src/streamTool/callLLMWithStreamTools.ts","size":6023,"chunks":["a1ee98a"]},{"name":"./src/util/emptyBlock.ts","size":170,"chunks":["a1ee98a"]},{"name":"./src/api/LLMResponse.ts","size":1039,"chunks":["a1ee98a"]},{"name":"./src/util/trimArray.ts","size":327,"chunks":["a1ee98a"]},{"name":"./src/api/promptHelpers/trimEmptyBlocks.ts","size":272,"chunks":["a1ee98a"]},{"name":"./src/api/promptHelpers/addCursorPosition.ts","size":390,"chunks":["a1ee98a"]},{"name":"./src/api/promptHelpers/convertBlocks.ts","size":183,"chunks":["a1ee98a"]},{"name":"./src/api/promptHelpers/flattenBlocks.ts","size":162,"chunks":["a1ee98a"]},{"name":"./src/api/promptHelpers/suffixIds.ts","size":204,"chunks":["a1ee98a"]},{"name":"./src/api/formats/html-blocks/htmlPromptData.ts","size":1189,"chunks":["a1ee98a"]},{"name":"./src/api/formats/html-blocks/defaultHTMLPromptBuilder.ts","size":5301,"chunks":["a1ee98a"]},{"name":"./src/prosemirror/fragmentUtil.ts","size":262,"chunks":["a1ee98a"]},{"name":"./src/prosemirror/agent.ts","size":6220,"chunks":["a1ee98a"]},{"name":"./src/prosemirror/changeset.ts","size":6000,"chunks":["a1ee98a"]},{"name":"./src/streamTool/streamTool.ts","size":48,"chunks":["a1ee98a"]},{"name":"./src/api/formats/base-tools/util/validateBlockArray.ts","size":574,"chunks":["a1ee98a"]},{"name":"./src/api/formats/base-tools/createAddBlocksTool.ts","size":5316,"chunks":["a1ee98a"]},{"name":"./src/api/formats/base-tools/createUpdateBlockTool.ts","size":4078,"chunks":["a1ee98a"]},{"name":"./src/api/formats/base-tools/delete.ts","size":1766,"chunks":["a1ee98a"]},{"name":"./src/api/formats/html-blocks/tools/getPartialHTML.ts","size":699,"chunks":["a1ee98a"]},{"name":"./src/prosemirror/rebaseTool.ts","size":1508,"chunks":["a1ee98a"]},{"name":"./src/api/formats/html-blocks/tools/rebaseTool.ts","size":829,"chunks":["a1ee98a"]},{"name":"./src/api/formats/html-blocks/tools/validate.ts","size":204,"chunks":["a1ee98a"]},{"name":"./src/api/formats/html-blocks/tools/index.ts","size":1886,"chunks":["a1ee98a"]},{"name":"./src/api/formats/html-blocks/htmlBlocks.ts","size":1283,"chunks":["a1ee98a"]},{"name":"./src/api/LLMRequest.ts","size":2565,"chunks":["a1ee98a"]},{"name":"./src/api/formats/json/jsonPromptData.ts","size":1298,"chunks":["a1ee98a"]},{"name":"./src/api/formats/json/defaultJSONPromptBuilder.ts","size":3694,"chunks":["a1ee98a"]},{"name":"./src/api/schema/mergeSchema.ts","size":829,"chunks":["a1ee98a"]},{"name":"./src/api/schema/schemaToJSONSchema.ts","size":4898,"chunks":["a1ee98a"]},{"name":"./src/api/formats/json/tools/validate.ts","size":1603,"chunks":["a1ee98a"]},{"name":"./src/api/formats/json/tools/index.ts","size":1324,"chunks":["a1ee98a"]},{"name":"./src/api/formats/json/json.ts","size":1220,"chunks":["a1ee98a"]},{"name":"./src/api/formats/markdown-blocks/markdownPromptData.ts","size":1209,"chunks":["a1ee98a"]},{"name":"./src/api/formats/markdown-blocks/defaultMarkdownPromptBuilder.ts","size":2744,"chunks":["a1ee98a"]},{"name":"./src/api/formats/markdown-blocks/tools/rebaseTool.ts","size":633,"chunks":["a1ee98a"]},{"name":"./src/api/formats/markdown-blocks/tools/validate.ts","size":202,"chunks":["a1ee98a"]},{"name":"./src/api/formats/markdown-blocks/tools/index.ts","size":1505,"chunks":["a1ee98a"]},{"name":"./src/api/formats/markdown-blocks/markdownBlocks.ts","size":1148,"chunks":["a1ee98a"]},{"name":"./src/api/promptHelpers/index.ts","size":112,"chunks":["a1ee98a"]},{"name":"./src/api/index.ts","size":138,"chunks":["a1ee98a"]},{"name":"./src/plugins/AgentCursorPlugin.ts","size":2444,"chunks":["a1ee98a"]},{"name":"./src/AIExtension.ts","size":6605,"chunks":["a1ee98a"]},{"name":"./src/blocknoteAIClient/client.ts","size":1601,"chunks":["a1ee98a"]},{"name":"../../node_modules/.pnpm/react-icons@5.5.0_react@19.1.0/node_modules/react-icons/lib/iconContext.mjs","size":251,"chunks":["a1ee98a"]},{"name":"../../node_modules/.pnpm/react-icons@5.5.0_react@19.1.0/node_modules/react-icons/lib/iconBase.mjs","size":4003,"chunks":["a1ee98a"]},{"name":"../../node_modules/.pnpm/react-icons@5.5.0_react@19.1.0/node_modules/react-icons/ri/index.mjs","size":6747,"chunks":["a1ee98a"]},{"name":"./src/i18n/dictionary.ts","size":150,"chunks":["a1ee98a"]},{"name":"./src/i18n/useAIDictionary.ts","size":111,"chunks":["a1ee98a"]},{"name":"./src/components/AIMenu/PromptSuggestionMenu.tsx","size":2901,"chunks":["a1ee98a"]},{"name":"./src/components/AIMenu/getDefaultAIMenuItems.tsx","size":6990,"chunks":["a1ee98a"]},{"name":"./src/components/AIMenu/AIMenu.tsx","size":3523,"chunks":["a1ee98a"]},{"name":"./src/components/AIMenu/BlockPositioner.tsx","size":1422,"chunks":["a1ee98a"]},{"name":"./src/components/AIMenu/AIMenuController.tsx","size":916,"chunks":["a1ee98a"]},{"name":"./src/components/FormattingToolbar/AIToolbarButton.tsx","size":853,"chunks":["a1ee98a"]},{"name":"./src/components/SuggestionMenu/getAISlashMenuItems.tsx","size":679,"chunks":["a1ee98a"]},{"name":"./src/index.ts","size":0,"chunks":["a1ee98a"]},{"name":"./src/i18n/locales/ar.ts","size":1531,"chunks":["842df4c"]},{"name":"./src/i18n/locales/de.ts","size":1585,"chunks":["842df4c"]},{"name":"./src/i18n/locales/en.ts","size":1483,"chunks":["842df4c"]},{"name":"./src/i18n/locales/es.ts","size":1553,"chunks":["842df4c"]},{"name":"./src/i18n/locales/fr.ts","size":1569,"chunks":["842df4c"]},{"name":"./src/i18n/locales/he.ts","size":1401,"chunks":["842df4c"]},{"name":"./src/i18n/locales/hr.ts","size":1494,"chunks":["842df4c"]},{"name":"./src/i18n/locales/is.ts","size":1562,"chunks":["842df4c"]},{"name":"./src/i18n/locales/it.ts","size":1555,"chunks":["842df4c"]},{"name":"./src/i18n/locales/ja.ts","size":1372,"chunks":["842df4c"]},{"name":"./src/i18n/locales/ko.ts","size":1394,"chunks":["842df4c"]},{"name":"./src/i18n/locales/nl.ts","size":1536,"chunks":["842df4c"]},{"name":"./src/i18n/locales/no.ts","size":1509,"chunks":["842df4c"]},{"name":"./src/i18n/locales/pl.ts","size":1510,"chunks":["842df4c"]},{"name":"./src/i18n/locales/pt.ts","size":1548,"chunks":["842df4c"]},{"name":"./src/i18n/locales/ru.ts","size":1549,"chunks":["842df4c"]},{"name":"./src/i18n/locales/sk.ts","size":1511,"chunks":["842df4c"]},{"name":"./src/i18n/locales/uk.ts","size":1556,"chunks":["842df4c"]},{"name":"./src/i18n/locales/vi.ts","size":1506,"chunks":["842df4c"]},{"name":"./src/i18n/locales/zh-tw.ts","size":1345,"chunks":["842df4c"]},{"name":"./src/i18n/locales/zh.ts","size":1340,"chunks":["842df4c"]},{"name":"./src/i18n/locales/index.ts","size":0,"chunks":["842df4c"]}]}

package/package.json
CHANGED
@@ -11,7 +11,7 @@
     "directory": "packages/xl-ai"
   },
   "license": "GPL-3.0 OR PROPRIETARY",
-  "version": "0.39.
+  "version": "0.39.2",
   "files": [
     "dist",
     "types",
@@ -56,10 +56,10 @@
     }
   },
   "dependencies": {
-    "@blocknote/core": "0.39.
-    "@blocknote/mantine": "0.39.
+    "@blocknote/core": "0.39.2",
+    "@blocknote/mantine": "0.39.2",
     "@blocknote/prosemirror-suggest-changes": "^0.1.3",
-    "@blocknote/react": "0.39.
+    "@blocknote/react": "0.39.2",
    "@floating-ui/react": "^0.26.4",
    "@tiptap/core": "^3.4.3",
    "ai": "^4.3.15",

package/types/src/api/aiRequest/defaultAIRequestSender.d.ts
ADDED
@@ -0,0 +1,4 @@
+import { HTMLPromptData } from "../formats/html-blocks/htmlPromptData.js";
+import { PromptBuilder, PromptInputDataBuilder } from "../formats/PromptBuilder.js";
+import { AIRequestSender } from "./types.js";
+export declare function defaultAIRequestSender<E = HTMLPromptData>(promptBuilder: PromptBuilder<E>, promptInputDataBuilder: PromptInputDataBuilder<E>): AIRequestSender;
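
The `@default` documented for `aiRequestSender` in `types.d.ts` (further down) suggests how this helper is meant to be wired up. A minimal sketch, assuming `defaultAIRequestSender` and `aiDocumentFormats` are re-exported from the package root:

```ts
import { aiDocumentFormats, defaultAIRequestSender } from "@blocknote/xl-ai";

// Build a sender that serializes the document as HTML blocks,
// mirroring the documented default configuration.
const sender = defaultAIRequestSender(
  aiDocumentFormats.html.defaultPromptBuilder,
  aiDocumentFormats.html.defaultPromptInputDataBuilder,
);
```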

package/types/src/api/aiRequest/execute.d.ts
ADDED
@@ -0,0 +1,31 @@
+import { Chat } from "@ai-sdk/react";
+import { BlockNoteEditor } from "@blocknote/core";
+import { ChatRequestOptions, UIMessage } from "ai";
+import { StreamToolsProvider } from "../index.js";
+import { AIRequest, AIRequestSender } from "./types.js";
+export declare function buildAIRequest(opts: {
+    editor: BlockNoteEditor<any, any, any>;
+    chat: Chat<UIMessage>;
+    userPrompt: string;
+    useSelection?: boolean;
+    deleteEmptyCursorBlock?: boolean;
+    streamToolsProvider?: StreamToolsProvider<any, any>;
+    onBlockUpdated?: (blockId: string) => void;
+}): {
+    editor: BlockNoteEditor<any, any, any>;
+    chat: Chat<UIMessage<unknown, import("ai").UIDataTypes, import("ai").UITools>>;
+    userPrompt: string;
+    selectedBlocks: import("@blocknote/core").Block<Record<string, import("@blocknote/core").BlockConfig<string, import("@blocknote/core").PropSchema, "inline" | "none" | "table">>, import("@blocknote/core").InlineContentSchema, import("@blocknote/core").StyleSchema>[] | undefined;
+    streamTools: [] | [import("../index.js").DeleteTool] | [import("../index.js").UpdateTool<string>] | [import("../index.js").AddTool<string>] | [import("../index.js").AddTool<string>, import("../index.js").DeleteTool] | [import("../index.js").UpdateTool<string>, import("../index.js").DeleteTool] | [import("../index.js").UpdateTool<string>, import("../index.js").AddTool<string>] | [import("../index.js").UpdateTool<string>, import("../index.js").AddTool<string>, import("../index.js").DeleteTool] | [import("../index.js").UpdateTool<any>] | [import("../index.js").AddTool<any>] | [import("../index.js").AddTool<any>, import("../index.js").DeleteTool] | [import("../index.js").UpdateTool<any>, import("../index.js").DeleteTool] | [import("../index.js").UpdateTool<any>, import("../index.js").AddTool<any>] | [import("../index.js").UpdateTool<any>, import("../index.js").AddTool<any>, import("../index.js").DeleteTool];
+    emptyCursorBlockToDelete: string | undefined;
+};
+/**
+ * Sends an LLM Request to the LLM backend and processes streaming tool calls
+ * made by the LLM
+ */
+export declare function executeAIRequest(opts: {
+    aiRequest: AIRequest;
+    sender: AIRequestSender;
+    chatRequestOptions?: ChatRequestOptions;
+    onStart?: () => void;
+}): Promise<void>;
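
A hedged sketch of the two-step flow these declarations describe: build an `AIRequest` from the editor and a chat object, then hand it to a sender via `executeAIRequest`. The `Chat` construction and the `model` argument are assumptions for illustration; any AI SDK `ChatTransport` should work.

```ts
import { Chat } from "@ai-sdk/react";
import type { LanguageModel, UIMessage } from "ai";
import type { BlockNoteEditor } from "@blocknote/core";
import {
  aiDocumentFormats,
  buildAIRequest,
  ClientSideTransport,
  defaultAIRequestSender,
  executeAIRequest,
} from "@blocknote/xl-ai";

async function runAI(editor: BlockNoteEditor<any, any, any>, model: LanguageModel) {
  // The chat keeps message history and submits requests through its transport.
  const chat = new Chat<UIMessage>({
    transport: new ClientSideTransport({ model, stream: true }),
  });

  const aiRequest = buildAIRequest({
    editor,
    chat,
    userPrompt: "Summarize the selected text",
    useSelection: true,
  });

  await executeAIRequest({
    aiRequest,
    sender: defaultAIRequestSender(
      aiDocumentFormats.html.defaultPromptBuilder,
      aiDocumentFormats.html.defaultPromptInputDataBuilder,
    ),
    onStart: () => console.log("LLM started streaming operations"),
  });
}
```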

package/types/src/api/aiRequest/types.d.ts
ADDED
@@ -0,0 +1,42 @@
+import { Chat } from "@ai-sdk/react";
+import { Block, BlockNoteEditor } from "@blocknote/core";
+import { UIMessage } from "ai";
+import { StreamTool } from "../../streamTool/streamTool.js";
+import { ChatRequestOptions } from "../../types.js";
+/**
+ * An AIRequest represents a user request for an editor AI call
+ */
+export type AIRequest = {
+    /**
+     * The editor from which we can read document state
+     */
+    editor: BlockNoteEditor<any, any, any>;
+    /**
+     * The chat object (from the AI SDK)
+     * is used to keep Message history, and to submit the LLM request via the underlying transport to the LLM
+     */
+    chat: Chat<UIMessage>;
+    /**
+     * The user's prompt
+     */
+    userPrompt: string;
+    /**
+     * The selection of the editor which the LLM should operate on
+     */
+    selectedBlocks?: Block<any, any, any>[];
+    /**
+     * The id of the block that should be excluded from the LLM call,
+     * this is used when using the AI slash menu in an empty block
+     */
+    emptyCursorBlockToDelete?: string;
+    /**
+     * The stream tools that can be used by the LLM
+     */
+    streamTools: StreamTool<any>[];
+};
+/**
+ * Responsible for submitting a BlockNote `AIRequest` to the Vercel AI SDK.
+ */
+export type AIRequestSender = {
+    sendAIRequest: (AIRequest: AIRequest, options: ChatRequestOptions) => Promise<void>;
+};
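
For a custom sender, the shape above is all that is required. A minimal sketch that logs the prompt and forwards it to the chat object; the `sendMessage` payload follows the AI SDK `Chat` API and is an assumption here, as is the root export of these types:

```ts
import type { AIRequest, AIRequestSender, ChatRequestOptions } from "@blocknote/xl-ai";

const loggingSender: AIRequestSender = {
  async sendAIRequest(aiRequest: AIRequest, options: ChatRequestOptions) {
    console.log("AI request for prompt:", aiRequest.userPrompt);
    console.log("stream tools available:", aiRequest.streamTools.length);

    // Forward the prompt to the LLM through the chat's transport.
    await aiRequest.chat.sendMessage({ text: aiRequest.userPrompt }, options);
  },
};
```

A real sender would additionally serialize document state through a `PromptBuilder`, which is what `defaultAIRequestSender` does.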

package/types/src/api/formats/index.d.ts
ADDED
@@ -0,0 +1,166 @@
+import { BlockNoteEditor } from "@blocknote/core";
+import { StreamTool } from "../../streamTool/streamTool.js";
+import { AddBlocksToolCall } from "./base-tools/createAddBlocksTool.js";
+import { UpdateBlockToolCall } from "./base-tools/createUpdateBlockTool.js";
+import { DeleteBlockToolCall } from "./base-tools/delete.js";
+import { HTMLPromptData } from "./html-blocks/htmlPromptData.js";
+import { PromptBuilder } from "./PromptBuilder.js";
+export type AddTool<T> = StreamTool<AddBlocksToolCall<T>>;
+export type UpdateTool<T> = StreamTool<UpdateBlockToolCall<T>>;
+export type DeleteTool = StreamTool<DeleteBlockToolCall>;
+export type StreamToolsConfig = {
+    add?: boolean;
+    update?: boolean;
+    delete?: boolean;
+};
+export type StreamToolsResult<TT, T extends StreamToolsConfig> = [
+    ...(T extends {
+        update: true;
+    } ? [UpdateTool<TT>] : []),
+    ...(T extends {
+        add: true;
+    } ? [AddTool<TT>] : []),
+    ...(T extends {
+        delete: true;
+    } ? [DeleteTool] : [])
+];
+export type StreamToolsProvider<TT, T extends StreamToolsConfig = {
+    add: true;
+    update: true;
+    delete: true;
+}> = {
+    getStreamTools: (editor: BlockNoteEditor<any, any, any>, selectionInfo?: {
+        from: number;
+        to: number;
+    } | boolean, onBlockUpdate?: (blockId: string) => void) => StreamToolsResult<TT, T>;
+};
+export type AIDocumentFormat<TT> = {
+    /**
+     * Function to get the stream tools that can apply HTML block updates to the editor
+     */
+    getStreamToolsProvider: <T extends StreamToolsConfig>(withDelays: boolean, defaultStreamTools?: T) => StreamToolsProvider<TT, T>;
+    /**
+     * The default PromptBuilder that determines how a userPrompt is converted to an array of
+     * LLM Messages (CoreMessage[])
+     */
+    defaultPromptBuilder: PromptBuilder<HTMLPromptData>;
+    /**
+     * Helper functions which can be used when implementing a custom PromptBuilder.
+     * The signature depends on the specific format
+     */
+    promptHelpers: any;
+};
+export declare const aiDocumentFormats: {
+    _experimental_json: {
+        getStreamToolsProvider: <T extends StreamToolsConfig = {
+            add: true;
+            update: true;
+            delete: true;
+        }>(opts?: {
+            withDelays?: boolean;
+            defaultStreamTools?: T;
+        }) => StreamToolsProvider<string, T>;
+        tools: {
+            add: (editor: BlockNoteEditor<any, any, any>, options: {
+                idsSuffixed: boolean;
+                withDelays: boolean;
+                onBlockUpdate?: ((blockId: string) => void) | undefined;
+            }) => StreamTool<AddBlocksToolCall<import("@blocknote/core").PartialBlock<any, any, any>>>;
+            update: (editor: BlockNoteEditor<any, any, any>, options: {
+                idsSuffixed: boolean;
+                withDelays: boolean;
+                updateSelection?: {
+                    from: number;
+                    to: number;
+                } | undefined;
+                onBlockUpdate?: ((blockId: string) => void) | undefined;
+            }) => StreamTool<UpdateBlockToolCall<import("@blocknote/core").PartialBlock<any, any, any>>>;
+            delete: (editor: BlockNoteEditor<any, any, any>, options: {
+                idsSuffixed: boolean;
+                withDelays: boolean;
+                onBlockUpdate?: (blockId: string) => void;
+            }) => StreamTool<DeleteBlockToolCall>;
+        };
+        defaultPromptBuilder: PromptBuilder<import("./json/jsonPromptData.js").JSONPromptData>;
+        defaultPromptInputDataBuilder: typeof import("./json/jsonPromptData.js").defaultJSONPromptDataBuilder;
+        promptHelpers: {
+            getDataForPromptNoSelection: typeof import("./json/jsonPromptData.js").getDataForPromptNoSelection;
+            getDataForPromptWithSelection: typeof import("./json/jsonPromptData.js").getDataForPromptWithSelection;
+        };
+    };
+    _experimental_markdown: {
+        getStreamToolsProvider: <T extends StreamToolsConfig = {
+            add: true;
+            update: true;
+            delete: true;
+        }>(opts?: {
+            withDelays?: boolean;
+            defaultStreamTools?: T;
+        }) => StreamToolsProvider<string, T>;
+        tools: {
+            add: (editor: BlockNoteEditor<any, any, any>, options: {
+                idsSuffixed: boolean;
+                withDelays: boolean;
+                onBlockUpdate?: ((blockId: string) => void) | undefined;
+            }) => StreamTool<AddBlocksToolCall<string>>;
+            update: (editor: BlockNoteEditor<any, any, any>, options: {
+                idsSuffixed: boolean;
+                withDelays: boolean;
+                updateSelection?: {
+                    from: number;
+                    to: number;
+                } | undefined;
+                onBlockUpdate?: ((blockId: string) => void) | undefined;
+            }) => StreamTool<UpdateBlockToolCall<string>>;
+            delete: (editor: BlockNoteEditor<any, any, any>, options: {
+                idsSuffixed: boolean;
+                withDelays: boolean;
+                onBlockUpdate?: (blockId: string) => void;
+            }) => StreamTool<DeleteBlockToolCall>;
+        };
+        defaultPromptBuilder: PromptBuilder<import("./markdown-blocks/markdownPromptData.js").MarkdownPromptData>;
+        defaultPromptInputDataBuilder: typeof import("./markdown-blocks/markdownPromptData.js").defaultMarkdownPromptDataBuilder;
+        promptHelpers: {
+            getDataForPromptNoSelection: typeof import("./markdown-blocks/markdownPromptData.js").getDataForPromptNoSelection;
+            getDataForPromptWithSelection: typeof import("./markdown-blocks/markdownPromptData.js").getDataForPromptWithSelection;
+        };
+    };
+    html: {
+        getStreamToolsProvider: <T extends StreamToolsConfig = {
+            add: true;
+            update: true;
+            delete: true;
+        }>(opts?: {
+            withDelays?: boolean;
+            defaultStreamTools?: T;
+        }) => StreamToolsProvider<string, T>;
+        tools: {
+            add: (editor: BlockNoteEditor<any, any, any>, options: {
+                idsSuffixed: boolean;
+                withDelays: boolean;
+                onBlockUpdate?: ((blockId: string) => void) | undefined;
+            }) => StreamTool<AddBlocksToolCall<string>>;
+            update: (editor: BlockNoteEditor<any, any, any>, options: {
+                idsSuffixed: boolean;
+                withDelays: boolean;
+                updateSelection?: {
+                    from: number;
+                    to: number;
+                } | undefined;
+                onBlockUpdate?: ((blockId: string) => void) | undefined;
+            }) => StreamTool<UpdateBlockToolCall<string>>;
+            delete: (editor: BlockNoteEditor<any, any, any>, options: {
+                idsSuffixed: boolean;
+                withDelays: boolean;
+                onBlockUpdate?: (blockId: string) => void;
+            }) => StreamTool<DeleteBlockToolCall>;
+        };
+        defaultPromptBuilder: PromptBuilder<HTMLPromptData>;
+        defaultPromptInputDataBuilder: typeof import("./html-blocks/htmlPromptData.js").defaultHTMLPromptInputDataBuilder;
+        promptHelpers: {
+            getDataForPromptNoSelection: typeof import("./html-blocks/htmlPromptData.js").getDataForPromptNoSelection;
+            getDataForPromptWithSelection: typeof import("./html-blocks/htmlPromptData.js").getDataForPromptWithSelection;
+        };
+    };
+};
+export * from "./PromptBuilder.js";
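
The `aiDocumentFormats` map is the entry point for choosing a document format and for restricting which tools the LLM may call. A sketch, assuming the map is exported from the package root, that exposes only `add` and `update` (no `delete`) over the HTML format:

```ts
import { aiDocumentFormats } from "@blocknote/xl-ai";

// Provider that only offers the add and update stream tools to the LLM.
const addUpdateOnlyProvider = aiDocumentFormats.html.getStreamToolsProvider({
  withDelays: true,
  defaultStreamTools: { add: true, update: true } as const,
});
```

The resulting provider can then be passed wherever a `streamToolsProvider` is accepted (see `buildAIRequest` above and `InvokeAIOptions` further down).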

package/types/src/streamTool/StreamToolExecutor.d.ts
ADDED
@@ -0,0 +1,80 @@
+import { StreamTool, StreamToolCall } from "./streamTool.js";
+export declare class ChunkExecutionError extends Error {
+    readonly chunk: any;
+    constructor(message: string, chunk: any, options?: {
+        cause?: unknown;
+    });
+}
+/**
+ * The Operation types wraps a StreamToolCall with metadata on whether
+ * it's an update to an existing and / or or a possibly partial (i.e.: incomplete, streaming in progress) operation
+ */
+type Operation<T extends StreamTool<any>[] | StreamTool<any>> = {
+    /**
+     * The StreamToolCall (parameters representing a StreamTool invocation)
+     */
+    operation: StreamToolCall<T>;
+    /**
+     * Whether this operation is an update to the previous operation
+     * (i.e.: the previous operation was a partial operation for which we now have additional data)
+     */
+    isUpdateToPreviousOperation: boolean;
+    /**
+     * Whether this operation is a partial operation
+     * (i.e.: incomplete, streaming in progress)
+     */
+    isPossiblyPartial: boolean;
+    metadata: any;
+};
+/**
+ * The StreamToolExecutor can apply StreamToolCalls to an editor.
+ *
+ * It accepts StreamToolCalls as JSON strings or already parsed and validated Operations.
+ * Note: When passing JSON strings, the executor will parse and validate them into Operations.
+ * When passing Operations, they're expected to have been validated by the StreamTool instances already.
+ * (StreamTool.validate)
+ *
+ * Applying the operations is delegated to the StreamTool instances.
+ *
+ * @example see the `manual-execution` example
+ */
+export declare class StreamToolExecutor<T extends StreamTool<any>[]> {
+    private streamTools;
+    private readonly stream;
+    /**
+     * @param streamTools - The StreamTools to use to apply the StreamToolCalls
+     */
+    constructor(streamTools: T);
+    private createStream;
+    private createExecutor;
+    /**
+     * Returns a WritableStream that can be used to write StreamToolCalls to the executor.
+     *
+     * The WriteableStream accepts JSON strings or Operation objects.
+     *
+     * Make sure to call `close` on the StreamToolExecutor instead of on the writable returned here!
+     */
+    get writable(): WritableStream<string | Operation<T>>;
+    /**
+     * Returns a ReadableStream that can be used to read the results of the executor.
+     */
+    get readable(): ReadableStream<{
+        status: "ok";
+        chunk: Operation<T>;
+    }>;
+    finish(): Promise<void>;
+    executeOperationsArray(source: AsyncIterable<string>): Promise<undefined>;
+    /**
+     * Accepts an async iterable and writes each chunk to the internal stream.
+     *
+     * (alternative to writing to the writable stream using {@link writable})
+     */
+    execute(source: AsyncIterable<string | Operation<T>>): Promise<void>;
+    /**
+     * Accepts a single chunk and processes it using the same logic.
+     *
+     * (alternative to writing to the writable stream using {@link writable})
+     */
+    executeOne(chunk: StreamToolCall<T>): Promise<void>;
+}
+export {};
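
A hedged sketch of driving the executor directly, along the lines of the `manual-execution` example referenced in the comment: build the HTML format's stream tools for an editor, then pipe (possibly partial) JSON tool-call chunks into `execute`. `jsonChunks` is a hypothetical stand-in for whatever source produces the tool calls, and the root exports are assumed.

```ts
import type { BlockNoteEditor } from "@blocknote/core";
import { aiDocumentFormats, StreamToolExecutor } from "@blocknote/xl-ai";

async function applyToolCalls(
  editor: BlockNoteEditor<any, any, any>,
  jsonChunks: AsyncIterable<string>, // hypothetical source of (partial) JSON tool calls
) {
  const streamTools = aiDocumentFormats.html
    .getStreamToolsProvider({ withDelays: false })
    .getStreamTools(editor);

  const executor = new StreamToolExecutor(streamTools);

  // Each chunk is parsed, validated by the StreamTools, and applied to the editor.
  await executor.execute(jsonChunks);
}
```

Already-validated `Operation` objects can instead be written to `writable`, or passed one at a time via `executeOne`.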

package/types/src/streamTool/index.d.ts
ADDED
@@ -0,0 +1,6 @@
+export * from "./jsonSchema.js";
+export * from "./StreamToolExecutor.js";
+export * from "./toolDefinitionsToToolSet.js";
+export * from "./vercelAiSdk/clientside/ClientSideTransport.js";
+export * from "./vercelAiSdk/util/partialObjectStreamUtil.js";
+export * from "./vercelAiSdk/util/UIMessageStreamToOperationsResult.js";

package/types/src/streamTool/toolDefinitionsToToolSet.d.ts
ADDED
@@ -0,0 +1,9 @@
+import { JSONSchema7 } from "ai";
+export declare function toolDefinitionsToToolSet(toolDefinitions: Record<string, {
+    name: string;
+    description?: string;
+    inputSchema: JSONSchema7;
+    outputSchema: JSONSchema7;
+}>): {
+    [k: string]: import("ai").Tool<unknown, unknown>;
+};
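
A small sketch of the converter, assuming it is exported from the package root. The `add` definition below is illustrative only, not the package's real tool schema; the returned object can be passed wherever the AI SDK expects a `ToolSet` (for example the `tools` option of `streamText`).

```ts
import { toolDefinitionsToToolSet } from "@blocknote/xl-ai";

const toolSet = toolDefinitionsToToolSet({
  add: {
    name: "add",
    description: "Add new blocks to the document",
    inputSchema: {
      type: "object",
      properties: { blocks: { type: "array" } },
      required: ["blocks"],
    },
    outputSchema: { type: "object" },
  },
});
```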

package/types/src/streamTool/vercelAiSdk/clientside/ClientSideTransport.d.ts
ADDED
@@ -0,0 +1,109 @@
+import { ChatTransport, LanguageModel, ToolSet, UIMessage, UIMessageChunk, generateObject, generateText, streamObject, streamText } from "ai";
+export declare const PROVIDER_OVERRIDES: {
+    readonly "mistral.chat": {
+        readonly mode: "auto";
+    };
+    readonly "google.generative-ai": {
+        readonly mode: "auto";
+    };
+    readonly "groq.chat": {
+        readonly providerOptions: {
+            readonly groq: {
+                readonly structuredOutputs: false;
+            };
+        };
+    };
+};
+export declare function getProviderOverrides(model: Exclude<LanguageModel, string>): {
+    readonly mode: "auto";
+} | {
+    readonly mode: "auto";
+} | {
+    readonly providerOptions: {
+        readonly groq: {
+            readonly structuredOutputs: false;
+        };
+    };
+};
+export declare class ClientSideTransport<UI_MESSAGE extends UIMessage> implements ChatTransport<UI_MESSAGE> {
+    readonly opts: {
+        /**
+         * The language model to use for the LLM call (AI SDK)
+         *
+         * (when invoking `callLLM` via the `AIExtension` this will default to the
+         * model set in the `AIExtension` options)
+         *
+         * Note: perhaps we want to remove this
+         */
+        model: LanguageModel;
+        /**
+         * Whether to stream the LLM response or not
+         *
+         * When streaming, we use the AI SDK stream functions `streamObject` / `streamText,
+         * otherwise, we use the AI SDK `generateObject` / `generateText` functions.
+         *
+         * @default true
+         */
+        stream?: boolean;
+        /**
+         * Use object generation instead of tool calling
+         *
+         * @default false
+         */
+        objectGeneration?: boolean;
+        /**
+         * Additional options to pass to the AI SDK `generateObject` / `streamObject` / `streamText` / `generateText` functions
+         */
+        _additionalOptions?: Partial<Parameters<typeof generateObject>[0]> | Partial<Parameters<typeof streamObject>[0]> | Partial<Parameters<typeof generateText>[0]> | Partial<Parameters<typeof streamText>[0]>;
+    };
+    constructor(opts: {
+        /**
+         * The language model to use for the LLM call (AI SDK)
+         *
+         * (when invoking `callLLM` via the `AIExtension` this will default to the
+         * model set in the `AIExtension` options)
+         *
+         * Note: perhaps we want to remove this
+         */
+        model: LanguageModel;
+        /**
+         * Whether to stream the LLM response or not
+         *
+         * When streaming, we use the AI SDK stream functions `streamObject` / `streamText,
+         * otherwise, we use the AI SDK `generateObject` / `generateText` functions.
+         *
+         * @default true
+         */
+        stream?: boolean;
+        /**
+         * Use object generation instead of tool calling
+         *
+         * @default false
+         */
+        objectGeneration?: boolean;
+        /**
+         * Additional options to pass to the AI SDK `generateObject` / `streamObject` / `streamText` / `generateText` functions
+         */
+        _additionalOptions?: Partial<Parameters<typeof generateObject>[0]> | Partial<Parameters<typeof streamObject>[0]> | Partial<Parameters<typeof generateText>[0]> | Partial<Parameters<typeof streamText>[0]>;
+    });
+    /**
+     * Calls an LLM with StreamTools, using the `generateObject` of the AI SDK.
+     *
+     * This is the non-streaming version.
+     */
+    protected generateObject(messages: UIMessage[], tools: ToolSet): Promise<ReadableStream<UIMessageChunk>>;
+    /**
+     * Calls an LLM with StreamTools, using the `streamObject` of the AI SDK.
+     *
+     * This is the streaming version.
+     */
+    protected streamObject(messages: UIMessage[], tools: ToolSet): Promise<ReadableStream<UIMessageChunk>>;
+    /**
+     * Calls an LLM with StreamTools, using the `streamText` of the AI SDK.
+     *
+     * This is the streaming version.
+     */
+    protected streamText(messages: UIMessage[], tools: ToolSet): Promise<import("ai").AsyncIterableStream<import("ai").InferUIMessageChunk<UIMessage<unknown, import("ai").UIDataTypes, import("ai").UITools>>>>;
+    sendMessages({ messages, body, }: Parameters<ChatTransport<UI_MESSAGE>["sendMessages"]>[0]): Promise<ReadableStream<UIMessageChunk>>;
+    reconnectToStream(): Promise<ReadableStream<UIMessageChunk> | null>;
+}
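
A sketch of the transport in client-only use, i.e. calling the model directly from the browser instead of proxying through a server route. The OpenAI provider and the API key handling are assumptions for illustration; any AI SDK `LanguageModel` can be passed as `model`.

```ts
import { createOpenAI } from "@ai-sdk/openai";
import { Chat } from "@ai-sdk/react";
import type { UIMessage } from "ai";
import { ClientSideTransport } from "@blocknote/xl-ai";

// Do not ship real API keys to the browser outside of demos.
const openai = createOpenAI({ apiKey: "<your-api-key>" });

const chat = new Chat<UIMessage>({
  transport: new ClientSideTransport({
    model: openai("gpt-4o"),
    stream: true,            // documented default: streamObject / streamText
    objectGeneration: false, // documented default: tool calling instead of object generation
  }),
});
```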

package/types/src/streamTool/vercelAiSdk/util/UIMessageStreamToOperationsResult.d.ts
ADDED
@@ -0,0 +1,29 @@
+import { DeepPartial } from "ai";
+import { StreamTool, StreamToolCall } from "../../streamTool.js";
+import { AsyncIterableStream } from "../../../util/stream.js";
+/**
+ * Result of an LLM call with stream tools
+ */
+type OperationsResult<T extends StreamTool<any>[]> = AsyncIterableStream<{
+    /**
+     * The operation the LLM wants to execute
+     */
+    operation: StreamToolCall<T>;
+    /**
+     * Whether {@link operation} is an update to the previous operation in the stream.
+     *
+     * For non-streaming mode, this will always be `false`
+     */
+    isUpdateToPreviousOperation: boolean;
+    /**
+     * Whether the {@link operation} is possibly partial (i.e. the LLM is still streaming data about this operation)
+     *
+     * For non-streaming mode, this will always be `false`
+     */
+    isPossiblyPartial: boolean;
+    metadata: any;
+}>;
+export declare function objectStreamToOperationsResult<T extends StreamTool<any>[]>(stream: ReadableStream<DeepPartial<{
+    operations: StreamToolCall<T>[];
+}>>, streamTools: T, chunkMetadata: any): OperationsResult<T>;
+export {};
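
A hedged consumption sketch: feed a partial object stream shaped as `{ operations: [...] }` (for example the `partialObjectStream` of the AI SDK's `streamObject`) plus the matching stream tools into `objectStreamToOperationsResult`, then iterate the typed result. The root export of the `StreamTool` types is an assumption.

```ts
import type { DeepPartial } from "ai";
import {
  objectStreamToOperationsResult,
  type StreamTool,
  type StreamToolCall,
} from "@blocknote/xl-ai";

async function logOperations<T extends StreamTool<any>[]>(
  partialStream: ReadableStream<DeepPartial<{ operations: StreamToolCall<T>[] }>>,
  streamTools: T,
) {
  const operations = objectStreamToOperationsResult(partialStream, streamTools, undefined);

  // OperationsResult is an AsyncIterableStream, so it can be consumed with for-await.
  for await (const { operation, isPossiblyPartial } of operations) {
    console.log(isPossiblyPartial ? "partial operation" : "complete operation", operation);
  }
}
```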

package/types/src/streamTool/vercelAiSdk/util/chatHandlers.d.ts
ADDED
@@ -0,0 +1,23 @@
+import { Chat } from "@ai-sdk/react";
+import { UIMessage } from "ai";
+import { StreamTool } from "../../streamTool.js";
+/**
+ * Listens to messages received in the `chat` object and processes tool calls
+ * by streaming them to an executor
+ *
+ * It also listens to the status and error events of the chat object and handles them
+ * appropriately.
+ *
+ * It also waits for all tool calls to be completed and then adds the results to the chat object.
+ *
+ * NOTE: listening to the `chat` object + error handling is a bit cumbersome. It might have been
+ * cleaner to directly listen to the UIMessageStream. However, for that we'd probably
+ * need to wrap the transport or chat object in AIExtension
+ *
+ * The error handling is currently quite convoluted. To properly test this,
+ * you can:
+ * a) make sure a tool call fails
+ * b) make sure the entire request fails (network error)
+ *
+ */
+export declare function setupToolCallStreaming(streamTools: StreamTool<any>[], chat: Chat<UIMessage>, onStart?: () => void): Promise<void>;
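
A hedged sketch of the handler, which presumes `setupToolCallStreaming` is reachable from the package root (it lives under `streamTool/vercelAiSdk/util/chatHandlers`). It attaches the HTML format's stream tools to an existing chat object so that tool calls streamed into the chat are applied to the editor:

```ts
import type { Chat } from "@ai-sdk/react";
import type { UIMessage } from "ai";
import type { BlockNoteEditor } from "@blocknote/core";
import { aiDocumentFormats, setupToolCallStreaming } from "@blocknote/xl-ai";

async function attachToolCallHandling(
  editor: BlockNoteEditor<any, any, any>,
  chat: Chat<UIMessage>,
) {
  const streamTools = aiDocumentFormats.html
    .getStreamToolsProvider({ withDelays: true })
    .getStreamTools(editor);

  // Resolves once all tool calls of the current response have been processed.
  await setupToolCallStreaming(streamTools, chat, () =>
    console.log("LLM started producing tool calls"),
  );
}
```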

package/types/src/streamTool/vercelAiSdk/util/partialObjectStreamUtil.d.ts
ADDED
@@ -0,0 +1,30 @@
+import { ObjectStreamPart, UIMessageChunk } from "ai";
+/**
+ * This file contains some helper functions to convert between object generation (streaming and non-streaming)
+ * and UI Message streams and vice versa.
+ *
+ * We convert object streams / generated objects to tool calls in UIMessageStreams because:
+ *
+ * - it simplifies our codebase (we can just handle everything as tool calls after conversion)
+ * - there are some issues with using a TextStream, see below:
+ *
+ * Normally, the AI SDK uses a TextStream to transport generated objects / object streams.
+ * However, this does not work well with error handling
+ *
+ * See:
+ *
+ * @see https://github.com/vercel/ai/issues/5027#issuecomment-2701011869
+ * @see https://github.com/vercel/ai/issues/5115
+ */
+/**
+ * Transforms a partial object stream to a data stream format.
+ * This is needed to pass errors through to the client in a clean way.
+ *
+ * @param stream - The partial object stream to transform
+ * @returns A ReadableStream that emits data stream formatted chunks
+ *
+ * Based on: https://github.com/vercel/ai/blob/b2469681bd31635a33a4b233d889f122c0b432c9/packages/ai/src/ui/transform-text-to-ui-message-stream.ts#L3
+ *
+ */
+export declare function partialObjectStreamAsToolCallInUIMessageStream<PARTIAL>(stream: ReadableStream<ObjectStreamPart<PARTIAL>>, toolName: string): ReadableStream<UIMessageChunk>;
+export declare function objectAsToolCallInUIMessageStream(object: any, toolName: string): ReadableStream<UIMessageChunk>;
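
A server-side sketch of the first helper, with assumed route shape, schema, tool name, and AI SDK helper usage: run `streamObject` and re-emit its partial objects as a single tool call inside a UI message stream, so that errors reach the client cleanly (the motivation given in the comments above).

```ts
import { openai } from "@ai-sdk/openai";
import { createUIMessageStreamResponse, streamObject } from "ai";
import { z } from "zod";
import { partialObjectStreamAsToolCallInUIMessageStream } from "@blocknote/xl-ai";

export async function POST(req: Request) {
  const { prompt } = await req.json();

  const result = streamObject({
    model: openai("gpt-4o"),
    schema: z.object({ operations: z.array(z.any()) }),
    prompt,
  });

  // Re-emit the object stream as a tool call; "applyDocumentOperations" is a hypothetical name.
  const uiStream = partialObjectStreamAsToolCallInUIMessageStream(
    result.fullStream,
    "applyDocumentOperations",
  );

  return createUIMessageStreamResponse({ stream: uiStream });
}
```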

package/types/src/types.d.ts
ADDED
@@ -0,0 +1,56 @@
+import { Chat } from "@ai-sdk/react";
+import { ChatTransport, UIMessage } from "ai";
+import { AIRequestSender, StreamToolsProvider } from "./index.js";
+/**
+ * Extra options (header, body, metadata) that can be passed to LLM requests
+ * This is a pattern we take from the Vercel AI SDK
+ */
+export type ChatRequestOptions = Parameters<Chat<UIMessage>["sendMessage"]>[1];
+export type AIRequestHelpers = {
+    /**
+     * The Vercel AI SDK transport is responsible for sending the AI SDK Request to the LLM backend
+     *
+     * Implement this function if you want to:
+     * - use a custom backend
+     * - change backend URLs
+     * - use a different transport layer (e.g.: websockets)
+     */
+    transport?: ChatTransport<UIMessage>;
+    /**
+     * Customize which stream tools are available to the LLM
+     */
+    streamToolsProvider?: StreamToolsProvider<any, any>;
+    /**
+     * Extra options (header, body, metadata) that can be passed to LLM requests
+     * This is a pattern we take from the Vercel AI SDK
+     */
+    chatRequestOptions?: ChatRequestOptions;
+    /**
+     * Responsible for submitting a BlockNote `AIRequest` to the Vercel AI SDK.
+     * Use this to transform the messages sent to the LLM
+     *
+     * @default `defaultAIRequestSender(aiDocumentFormats.html.defaultPromptBuilder, aiDocumentFormats.html.defaultPromptInputDataBuilder)`
+     */
+    aiRequestSender?: AIRequestSender;
+};
+export type InvokeAIOptions = {
+    /**
+     * The user prompt to use for the LLM call
+     */
+    userPrompt: string;
+    /**
+     * Whether to use the editor selection for the LLM call
+     *
+     * @default true
+     */
+    useSelection?: boolean;
+    /**
+     * If the user's cursor is in an empty paragraph, automatically delete it when the AI
+     * is starting to write.
+     *
+     * (This is used when a user starts typing `/ai` in an empty block)
+     *
+     * @default true
+     */
+    deleteEmptyCursorBlock?: boolean;
+} & AIRequestHelpers;
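
Taken together, `InvokeAIOptions` is the options bag a caller assembles for an AI invocation. A typed sketch under the assumption that the type and helpers are exported from the package root; how the options are handed to the `AIExtension` entry point depends on the extension's API and is not shown in this diff.

```ts
import {
  aiDocumentFormats,
  defaultAIRequestSender,
  type InvokeAIOptions,
} from "@blocknote/xl-ai";

const options: InvokeAIOptions = {
  userPrompt: "Rewrite this section in a friendlier tone",
  useSelection: true,           // documented default: true
  deleteEmptyCursorBlock: true, // documented default: true
  // Restrict the LLM to updating existing blocks only.
  streamToolsProvider: aiDocumentFormats.html.getStreamToolsProvider({
    defaultStreamTools: { update: true } as const,
  }),
  aiRequestSender: defaultAIRequestSender(
    aiDocumentFormats.html.defaultPromptBuilder,
    aiDocumentFormats.html.defaultPromptInputDataBuilder,
  ),
};
```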