@fluidframework/ai-collab 2.22.1 → 2.23.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +4 -0
- package/README.md +70 -4
- package/api-report/ai-collab.alpha.api.md +170 -2
- package/dist/aiCollab.d.ts +0 -1
- package/dist/aiCollab.d.ts.map +1 -1
- package/dist/aiCollab.js +1 -2
- package/dist/aiCollab.js.map +1 -1
- package/dist/aiCollabApi.d.ts +50 -3
- package/dist/aiCollabApi.d.ts.map +1 -1
- package/dist/aiCollabApi.js.map +1 -1
- package/dist/alpha.d.ts +17 -0
- package/dist/explicit-strategy/debugEvents.d.ts +248 -0
- package/dist/explicit-strategy/debugEvents.d.ts.map +1 -0
- package/dist/explicit-strategy/debugEvents.js +36 -0
- package/dist/explicit-strategy/debugEvents.js.map +1 -0
- package/dist/explicit-strategy/index.d.ts +4 -4
- package/dist/explicit-strategy/index.d.ts.map +1 -1
- package/dist/explicit-strategy/index.js +176 -54
- package/dist/explicit-strategy/index.js.map +1 -1
- package/dist/index.d.ts +2 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js.map +1 -1
- package/lib/aiCollab.d.ts +0 -1
- package/lib/aiCollab.d.ts.map +1 -1
- package/lib/aiCollab.js +1 -2
- package/lib/aiCollab.js.map +1 -1
- package/lib/aiCollabApi.d.ts +50 -3
- package/lib/aiCollabApi.d.ts.map +1 -1
- package/lib/aiCollabApi.js.map +1 -1
- package/lib/alpha.d.ts +17 -0
- package/lib/explicit-strategy/debugEvents.d.ts +248 -0
- package/lib/explicit-strategy/debugEvents.d.ts.map +1 -0
- package/lib/explicit-strategy/debugEvents.js +32 -0
- package/lib/explicit-strategy/debugEvents.js.map +1 -0
- package/lib/explicit-strategy/index.d.ts +4 -4
- package/lib/explicit-strategy/index.d.ts.map +1 -1
- package/lib/explicit-strategy/index.js +174 -52
- package/lib/explicit-strategy/index.js.map +1 -1
- package/lib/index.d.ts +2 -1
- package/lib/index.d.ts.map +1 -1
- package/lib/index.js.map +1 -1
- package/package.json +13 -11
- package/src/aiCollab.ts +1 -2
- package/src/aiCollabApi.ts +54 -3
- package/src/explicit-strategy/debugEvents.ts +297 -0
- package/src/explicit-strategy/index.ts +269 -59
- package/src/index.ts +20 -0
package/CHANGELOG.md
CHANGED
package/README.md
CHANGED
@@ -85,14 +85,14 @@ export class PlannerAppState extends sf.object("PlannerAppState", {
 ### Example 1: Collaborate with AI
 
 ```ts
-import { aiCollab } from "@fluidframework/ai-collab/alpha";
+import { aiCollab, DebugEvent } from "@fluidframework/ai-collab/alpha";
 import { PlannerAppState } from "./types.ts"
 // This is not a real file, this is meant to represent how you initialize your app data.
 import { initializeAppState } from "./yourAppInitializationFile.ts"
 
 // --------- File name: "app.ts" ---------
 
-// Initialize your app state somehow
+// Initialize your Fluid app state somehow
 const appState: PlannerAppState = initializeAppState({
 	taskGroups: [
 		{
@@ -143,9 +143,12 @@ const response = await aiCollab({
 			"You are a manager that is helping out with a project management tool. You have been asked to edit a group of tasks.",
 		userAsk: userAsk,
 	},
+	limiters: {
+		maxModelCalls: 25
+	}
 	planningStep: true,
 	finalReviewStep: true,
-
+	debugEventLogHandler: (event: DebugEvent) => {console.log(event);}
 });
 
 if (response.status === 'sucess') {
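A note on the handler added above: it receives every `DebugEvent` synchronously, so it can do more than write to the console. A minimal sketch of a buffering handler, using only types this version exports (the variable names are illustrative):

```ts
import type { DebugEvent } from "@fluidframework/ai-collab/alpha";

// Buffer every event emitted during one aiCollab() run so the full log can
// be inspected or serialized after the call resolves.
const eventLog: DebugEvent[] = [];
const debugEventLogHandler = (event: DebugEvent): void => {
	eventLog.push(event);
};
```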
@@ -174,12 +177,75 @@ Once the `aiCollab` function call is initiated, an LLM will immediately begin at
 - `promptGeneration.ts`: Logic for producing the different types of prompts sent to an LLM in order to edit a SharedTree.
 - `typeGeneration.ts`: Generates serialized(/able) representations of a SharedTree Schema which is used within prompts and the generated of the structured output JSON schema
 - `utils.ts`: Utilities for interacting with a SharedTree
+- `debugEvents.ts`: Types and helper functions for `DebugEvent`'s emitted to the callback provided to the aiCollab's `debugEventLogHandler`
 - `/implicit-strategy`: The original implicit strategy, currently not used under the exported aiCollab API surface.
 
+## Debug Events
+This package allows users to consume `DebugEvents` that can be very helpful in understanding what's going on internally and debugging potential issues.
+Users can consume these events by passing in a `debugEventLogHandler` when calling the `aiCollab()` function:
+```ts
+aiCollab({
+	openAI: {
+		client: new OpenAI({
+			apiKey: OPENAI_API_KEY,
+		}),
+		modelName: "gpt-4o",
+	},
+	treeNode: view.root.taskGroups[0],
+	prompt: {
+		systemRoleContext:
+			"You are a manager that is helping out with a project management tool. You have been asked to edit a group of tasks.",
+		userAsk: userAsk,
+	},
+	limiters: {
+		maxModelCalls: 25
+	}
+	planningStep: true,
+	finalReviewStep: true,
+	debugEventLogHandler: (event: DebugEvent) => {console.log(event);} // This should be your debug event log handler
+});
+
+```
+
+All debug events implement the `DebugEvent` interface. Some also implement `EventFlowDebugEvent`, which lets them mark a progress point in a specific logic flow within a given execution of `aiCollab()`.
+
+### Event flow Overview
+To see detailed information about each event, please read their cooresponding [tsdoc](./src/explicit-strategy/debugEvents.ts#L46)
+
+1. **Core Event Loop** - The start and end of a single execution of aiCollab.
+   - Events:
+     1. **Core Event Loop Started**
+     1. **Core Event Loop Completed**
+2. **Generate Planning Prompt** - The event flow for producing an initial LLM generated plan to assist the LLM with creating edits to the users Shared Tree.
+   - Events
+     1. **Generate Planning Prompt Started**
+        - Child `DebugEvent`'s triggered:
+          1. **Llm Api Call** - An event detailing the raw api request to the LLM client.
+     1. **Generate Planning Prompt Completed**
+3. **Generate Tree Edit** - The event flow for generating an edit to the users Shared Tree to further complete the users request.
+   - Events:
+     1. **Generate Tree Edit Started**
+        - Child `DebugEvent`'s triggered:
+          1. **Llm Api Call** - An event detailing the raw api request to the LLM client.
+     1. **Generate Tree Edit Completed**
+     1. **Apply Edit Success** OR **Apply Edit Failure** - The outcome of applying the LLM generated tree edit.
+4. **Final Review** - The event flow for asking the LLM to complete a final review of work it has completed and confirming if the users request has been completed. If the LLM is not satisfied, the **Generate Tree Edit** loop will start again.
+   - Events:
+     - **Final Review Started**
+       - Child `DebugEvent`'s triggered:
+         1. **Llm Api Call** - An event detailing the raw api request to the LLM client.
+     - **Final Review Completed**
+
+
+### Using Trace Id's
+Debug Events in ai-collab have two different types of trace id's:
+- `traceId`: This field exists on all debug events and can be used to correlate all debug events that happened in a single execution of `aiCollab()`. Sorting the events by timestamp will show the proper chronological order of the events. Note that the events should already be emitted in chronological order.
+- `eventFlowTraceId`: this field exists on all `EventFlowDebugEvents` and can be used to correlate all events from a particular event flow. Additionally all LLM api call events will contain the `eventFlowTraceId` field as well as a `triggeringEventFlowName` so you can link LLM API calls to a particular event flow.
+
+
 ## Known Issues & limitations
 
 1. Union types for a TreeNode are not present when generating App Schema. This will require extracting a field schema instead of TreeNodeSchema when passed a non root node.
-1. The Editing System prompt & structured out schema currently provide array related edits even when there are no arrays. This forces you to have an array in your schema to produce a valid json schema
 1. Optional roots are not allowed, This is because if you pass undefined as your treeNode to the API, we cannot disambiguate whether you passed the root or not.
 1. Primitive root nodes are not allowed to be passed to the API. You must use an object or array as your root.
 1. Optional nodes are not supported -- when we use optional nodes, the OpenAI API returns an error complaining that the structured output JSON schema is invalid. I have introduced a fix that should work upon manual validation of the json schema, but there looks to be an issue with their API. I have filed a ticket with OpenAI to address this
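A sketch of how the two trace ids described above can be combined in practice, grouping events per event flow; the `eventFlows` map and `handleDebugEvent` function are illustrative names, not part of the package:

```ts
import type { DebugEvent } from "@fluidframework/ai-collab/alpha";

// Group events by eventFlowTraceId. EventFlowDebugEvents and LLM_API_CALL
// events both carry the field, so each bucket collects one complete flow.
const eventFlows = new Map<string, DebugEvent[]>();

function handleDebugEvent(event: DebugEvent): void {
	if ("eventFlowTraceId" in event && typeof event.eventFlowTraceId === "string") {
		const bucket = eventFlows.get(event.eventFlowTraceId) ?? [];
		bucket.push(event);
		eventFlows.set(event.eventFlowTraceId, bucket);
	}
}
```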
package/api-report/ai-collab.alpha.api.md
CHANGED
@@ -9,14 +9,14 @@ export function aiCollab(options: AiCollabOptions): Promise<AiCollabSuccessRespo
 
 // @alpha
 export interface AiCollabErrorResponse {
-    readonly errorMessage: "tokenLimitExceeded" | "tooManyErrors" | "tooManyModelCalls" | "aborted";
+    readonly errorMessage: "tokenLimitExceeded" | "tooManyErrors" | "tooManyModelCalls" | "aborted" | "unexpectedError";
     readonly status: "failure" | "partial-failure";
     readonly tokensUsed: TokenUsage;
 }
 
 // @alpha
 export interface AiCollabOptions {
-    readonly
+    readonly debugEventLogHandler?: DebugEventLogHandler;
     readonly finalReviewStep?: boolean;
    readonly limiters?: {
         readonly abortController?: AbortController;
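For callers, the widened `errorMessage` union above means one more case to handle. A minimal sketch of exhaustive handling against the 2.23.0 surface (the helper name is hypothetical):

```ts
import type { AiCollabErrorResponse } from "@fluidframework/ai-collab/alpha";

// Exhaustive switch over the widened union; the `never` assignment in the
// default branch fails to compile if a future version adds another variant.
function describeFailure(response: AiCollabErrorResponse): string {
	switch (response.errorMessage) {
		case "tokenLimitExceeded":
			return "a token limiter was hit";
		case "tooManyErrors":
			return "the LLM made too many sequential errors";
		case "tooManyModelCalls":
			return "the maxModelCalls limiter was hit";
		case "aborted":
			return "the collaboration was aborted";
		case "unexpectedError":
			return "an unexpected error occurred";
		default: {
			const unreachable: never = response.errorMessage;
			return unreachable;
		}
	}
}
```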
@@ -40,12 +40,75 @@ export interface AiCollabSuccessResponse {
     readonly tokensUsed: TokenUsage;
 }
 
+// @alpha
+export interface ApplyEditFailure extends EventFlowDebugEvent {
+    edit: LlmTreeEdit;
+    errorMessage: string;
+    // (undocumented)
+    eventFlowName: typeof EventFlowDebugNames.GENERATE_AND_APPLY_TREE_EDIT;
+    // (undocumented)
+    eventFlowStatus: "IN_PROGRESS";
+    eventFlowTraceId: string;
+    // (undocumented)
+    eventName: "APPLIED_EDIT_FAILURE";
+    sequentialErrorCount: number;
+}
+
+// @alpha
+export interface ApplyEditSuccess extends EventFlowDebugEvent {
+    edit: LlmTreeEdit;
+    // (undocumented)
+    eventFlowName: typeof EventFlowDebugNames.GENERATE_AND_APPLY_TREE_EDIT;
+    // (undocumented)
+    eventFlowStatus: "IN_PROGRESS";
+    eventFlowTraceId: string;
+    // (undocumented)
+    eventName: "APPLIED_EDIT_SUCCESS";
+}
+
+// @alpha
+export interface CoreEventLoopCompleted extends EventFlowDebugEvent {
+    // (undocumented)
+    errorMessage?: string;
+    // (undocumented)
+    eventFlowName: typeof EventFlowDebugNames.CORE_EVENT_LOOP;
+    // (undocumented)
+    eventFlowStatus: "COMPLETED";
+    // (undocumented)
+    eventName: "CORE_EVENT_LOOP_COMPLETED";
+    // (undocumented)
+    failureReason?: string;
+    // (undocumented)
+    status: "success" | "failure";
+}
+
+// @alpha
+export interface CoreEventLoopStarted extends EventFlowDebugEvent {
+    // (undocumented)
+    eventFlowName: typeof EventFlowDebugNames.CORE_EVENT_LOOP;
+    // (undocumented)
+    eventFlowStatus: "STARTED";
+    // (undocumented)
+    eventName: "CORE_EVENT_LOOP_STARTED";
+}
+
 // @alpha
 export function createMergableDiffSeries(diffs: Difference[]): Difference[];
 
 // @alpha
 export function createMergableIdDiffSeries(oldObject: unknown, diffs: Difference[], idAttributeName: string | number): Difference[];
 
+// @alpha
+export interface DebugEvent {
+    eventName: string;
+    id: string;
+    timestamp: string;
+    traceId: string;
+}
+
+// @alpha
+export type DebugEventLogHandler = <T extends DebugEvent>(event: T) => unknown;
+
 // @alpha
 export type Difference = DifferenceCreate | DifferenceRemove | DifferenceChange | DifferenceMove;
 
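Because the base `DebugEvent` only types `eventName` as `string`, consumers narrow to the specific shapes above via the literal `eventName` values. A sketch that watches edit outcomes (the casts assume the literals shown in this report):

```ts
import type { ApplyEditFailure, ApplyEditSuccess, DebugEvent } from "@fluidframework/ai-collab/alpha";

// Narrow on eventName to observe edits being applied; sequentialErrorCount
// indicates how close a run is to the maxSequentialErrors limiter.
function onEditOutcome(event: DebugEvent): void {
	if (event.eventName === "APPLIED_EDIT_FAILURE") {
		const failure = event as ApplyEditFailure;
		console.warn(`Edit rejected (${failure.sequentialErrorCount} in a row): ${failure.errorMessage}`);
	} else if (event.eventName === "APPLIED_EDIT_SUCCESS") {
		console.log("Edit applied:", (event as ApplyEditSuccess).edit);
	}
}
```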
@@ -99,6 +162,89 @@ export interface DifferenceRemove {
     type: "REMOVE";
 }
 
+// @alpha
+export interface EventFlowDebugEvent extends DebugEvent {
+    eventFlowName: string;
+    eventFlowStatus: "STARTED" | "COMPLETED" | "IN_PROGRESS";
+    eventFlowTraceId: string;
+}
+
+// @alpha
+export type EventFlowDebugName = (typeof EventFlowDebugNames)[keyof typeof EventFlowDebugNames];
+
+// @alpha
+export const EventFlowDebugNames: {
+    readonly CORE_EVENT_LOOP: "CORE_EVENT_LOOP";
+    readonly GENERATE_PLANNING_PROMPT: "GENERATE_PLANNING_PROMPT";
+    readonly GENERATE_AND_APPLY_TREE_EDIT: "GENERATE_AND_APPLY_TREE_EDIT";
+    readonly FINAL_REVIEW: "FINAL_REVIEW";
+};
+
+// @alpha
+export interface FinalReviewCompleted<TIsLlmResponseValid = boolean, TReviewResponse = TIsLlmResponseValid extends true ? "yes" | "no" : undefined> extends EventFlowDebugEvent {
+    didLlmAccomplishGoal: TReviewResponse;
+    // (undocumented)
+    eventFlowName: typeof EventFlowDebugNames.FINAL_REVIEW;
+    // (undocumented)
+    eventFlowStatus: "COMPLETED";
+    // (undocumented)
+    eventName: "FINAL_REVIEW_COMPLETED";
+    isLlmResponseValid: TIsLlmResponseValid;
+}
+
+// @alpha
+export interface FinalReviewStarted extends EventFlowDebugEvent {
+    // (undocumented)
+    eventFlowName: typeof EventFlowDebugNames.FINAL_REVIEW;
+    // (undocumented)
+    eventFlowStatus: "STARTED";
+    // (undocumented)
+    eventName: "FINAL_REVIEW_STARTED";
+    llmPrompt: string;
+}
+
+// @alpha
+export interface GenerateTreeEditCompleted<TIsLlmResponseValid = boolean, TEdit = TIsLlmResponseValid extends true ? LlmTreeEdit | null : undefined> extends EventFlowDebugEvent {
+    // (undocumented)
+    eventFlowName: typeof EventFlowDebugNames.GENERATE_AND_APPLY_TREE_EDIT;
+    // (undocumented)
+    eventFlowStatus: "COMPLETED";
+    // (undocumented)
+    eventName: "GENERATE_TREE_EDIT_COMPLETED";
+    isLlmResponseValid: TIsLlmResponseValid;
+    llmGeneratedEdit: TEdit;
+}
+
+// @alpha
+export interface GenerateTreeEditStarted extends EventFlowDebugEvent {
+    // (undocumented)
+    eventFlowName: typeof EventFlowDebugNames.GENERATE_AND_APPLY_TREE_EDIT;
+    // (undocumented)
+    eventFlowStatus: "STARTED";
+    // (undocumented)
+    eventName: "GENERATE_TREE_EDIT_STARTED";
+    // (undocumented)
+    llmPrompt: string;
+}
+
+// @alpha
+export interface LlmApiCallDebugEvent extends DebugEvent {
+    eventFlowTraceId: string;
+    // (undocumented)
+    eventName: "LLM_API_CALL";
+    modelName: string;
+    requestParams: unknown;
+    response: unknown;
+    tokenUsage?: {
+        promptTokens: number;
+        completionTokens: number;
+    };
+    triggeringEventFlowName: EventFlowDebugName;
+}
+
+// @alpha
+export type LlmTreeEdit = Record<string, unknown>;
+
 // @alpha
 export type ObjectPath = (string | number)[];
 
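`LlmApiCallDebugEvent` pairs an optional `tokenUsage` with `triggeringEventFlowName`, so per-flow token accounting falls out naturally. An illustrative sketch:

```ts
import type { DebugEvent, LlmApiCallDebugEvent } from "@fluidframework/ai-collab/alpha";

// Tally prompt + completion tokens per triggering event flow.
const tokensByFlow = new Map<string, number>();

function tallyLlmTokens(event: DebugEvent): void {
	if (event.eventName !== "LLM_API_CALL") {
		return;
	}
	const call = event as LlmApiCallDebugEvent;
	const used =
		(call.tokenUsage?.promptTokens ?? 0) + (call.tokenUsage?.completionTokens ?? 0);
	tokensByFlow.set(
		call.triggeringEventFlowName,
		(tokensByFlow.get(call.triggeringEventFlowName) ?? 0) + used,
	);
}
```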
@@ -118,6 +264,28 @@ export interface Options {
     } | undefined;
 }
 
+// @alpha
+export interface PlanningPromptCompleted<TIsLlmResponseValid = boolean, TPlan = TIsLlmResponseValid extends true ? string : undefined> extends EventFlowDebugEvent {
+    // (undocumented)
+    eventFlowName: typeof EventFlowDebugNames.GENERATE_PLANNING_PROMPT;
+    // (undocumented)
+    eventFlowStatus: "COMPLETED";
+    // (undocumented)
+    eventName: "GENERATE_PLANNING_PROMPT_COMPLETED";
+    isLlmResponseValid: TIsLlmResponseValid;
+    llmGeneratedPlan: TPlan;
+}
+
+// @alpha
+export interface PlanningPromptStarted extends EventFlowDebugEvent {
+    // (undocumented)
+    eventFlowName: typeof EventFlowDebugNames.GENERATE_PLANNING_PROMPT;
+    // (undocumented)
+    eventFlowStatus: "STARTED";
+    // (undocumented)
+    eventName: "GENERATE_PLANNING_PROMPT_STARTED";
+}
+
 // @alpha
 export class SharedTreeBranchManager {
     constructor(params?: {
package/dist/aiCollab.d.ts
CHANGED
package/dist/aiCollab.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"aiCollab.d.ts","sourceRoot":"","sources":["../src/aiCollab.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EACX,qBAAqB,EACrB,eAAe,EACf,uBAAuB,EACvB,MAAM,kBAAkB,CAAC;AAG1B
+
{"version":3,"file":"aiCollab.d.ts","sourceRoot":"","sources":["../src/aiCollab.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EACX,qBAAqB,EACrB,eAAe,EACf,uBAAuB,EACvB,MAAM,kBAAkB,CAAC;AAG1B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAwDG;AACH,wBAAsB,QAAQ,CAC7B,OAAO,EAAE,eAAe,GACtB,OAAO,CAAC,uBAAuB,GAAG,qBAAqB,CAAC,CAa1D"}
package/dist/aiCollab.js
CHANGED
@@ -49,7 +49,6 @@ const index_js_1 = require("./explicit-strategy/index.js");
 * },
 * planningStep: true,
 * finalReviewStep: true,
-* dumpDebugLog: true,
 * });
 * ```
 *
@@ -71,9 +70,9 @@ async function aiCollab(options) {
         openAI: options.openAI,
         prompt: options.prompt,
         limiters: options.limiters,
-        dumpDebugLog: options.dumpDebugLog,
         planningStep: options.planningStep,
         finalReviewStep: options.finalReviewStep,
+        debugEventLogHandler: options.debugEventLogHandler,
     });
     return response;
 }
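The two hunks above replace the removed `dumpDebugLog` option with `debugEventLogHandler`. A hypothetical migration helper for 2.22.1 callers that relied on the old flag (the name and formatting choices are illustrative, not part of the package):

```ts
import type { AiCollabOptions, DebugEvent } from "@fluidframework/ai-collab/alpha";

// Reproduce the spirit of dumpDebugLog: true by attaching a handler that
// prints each event; callers now own the output channel themselves.
function withConsoleDebugLog(options: AiCollabOptions): AiCollabOptions {
	return {
		...options,
		debugEventLogHandler: (event: DebugEvent) => console.log(JSON.stringify(event, undefined, 2)),
	};
}
```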
package/dist/aiCollab.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"aiCollab.js","sourceRoot":"","sources":["../src/aiCollab.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAOH,2DAAiE;AAEjE
+
{"version":3,"file":"aiCollab.js","sourceRoot":"","sources":["../src/aiCollab.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAOH,2DAAiE;AAEjE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAwDG;AACI,KAAK,UAAU,QAAQ,CAC7B,OAAwB;IAExB,MAAM,QAAQ,GAAG,MAAM,IAAA,4BAAiB,EAAC;QACxC,QAAQ,EAAE,OAAO,CAAC,QAAQ;QAC1B,SAAS,EAAE,OAAO,CAAC,SAAS;QAC5B,MAAM,EAAE,OAAO,CAAC,MAAM;QACtB,MAAM,EAAE,OAAO,CAAC,MAAM;QACtB,QAAQ,EAAE,OAAO,CAAC,QAAQ;QAC1B,YAAY,EAAE,OAAO,CAAC,YAAY;QAClC,eAAe,EAAE,OAAO,CAAC,eAAe;QACxC,oBAAoB,EAAE,OAAO,CAAC,oBAAoB;KAClD,CAAC,CAAC;IAEH,OAAO,QAAQ,CAAC;AACjB,CAAC;AAfD,4BAeC","sourcesContent":["/*!\n * Copyright (c) Microsoft Corporation and contributors. All rights reserved.\n * Licensed under the MIT License.\n */\n\nimport type {\n\tAiCollabErrorResponse,\n\tAiCollabOptions,\n\tAiCollabSuccessResponse,\n} from \"./aiCollabApi.js\";\nimport { generateTreeEdits } from \"./explicit-strategy/index.js\";\n\n/**\n * Calls an LLM to modify the provided SharedTree in a series of real time edits based on the provided users prompt input.\n * @remarks This function is designed to be a controlled \"all-in-one\" function that handles the entire process of calling an LLM to collaborative edit a SharedTree.\n *\n * @example\n * ```typescript\n * import {\n * \tSchemaFactory,\n * \tTreeViewConfiguration,\n * \ttype TreeView\n * } from \"@fluidframework/tree\";\n *\n * const sf = new SchemaFactory(\"todo-app\");\n *\n * class TodoTask extends sf.object(\"TodoTask\", {\n * \ttitle: sf.string,\n * \tdescription: sf.string,\n * }) {}\n *\n * class TodoAppState extends sf.object(\"TodoAppState\", {\n * \ttasks: sf.array(TodoTask),\n * }) {}\n *\n * // Initialize your SharedTree\n * const treeView: TreeView = tree.viewWith(new TreeViewConfiguration({ schema: TodoAppState }));\n * treeView.initialize({ tasks: [] });\n *\n * // Collaborate with AI in realtime in just one function call.\n * const response = await aiCollab({\n * \t\topenAI: {\n * \t\t\tclient: new OpenAI({\n * \t\t\t\tapiKey: OPENAI_API_KEY,\n * \t\t\t}),\n * \t\t\tmodelName: \"gpt-4o\",\n * \t\t},\n * \t\ttreeNode: view.root,\n * \t\tprompt: {\n * \t\t\tsystemRoleContext:\n * \t\t\t\t\"You are an helpful assistant managing a todo list for a user.\",\n * \t\t\tuserAsk: \"Create a set of new todos to plan a vacation to Cancun.\",\n * \t\t},\n * \t\tplanningStep: true,\n * \t\tfinalReviewStep: true,\n * \t});\n * ```\n *\n * @remarks Known Limitiations:\n * - Root level array nodes are not supported\n * - Nested arrays are not supported\n * - Primitive nodes are not supported, e.g. 'string', 'number', 'boolean'\n * - Your application's Shared Tree schema must have no more than 4 levels of nesting\n * - Optional nodes are not supported in the Shared Tree schema\n * - Union types are not supported in the Shared Tree schema\n * - See README for more details.\n *\n * @alpha\n */\nexport async function aiCollab(\n\toptions: AiCollabOptions,\n): Promise<AiCollabSuccessResponse | AiCollabErrorResponse> {\n\tconst response = await generateTreeEdits({\n\t\ttreeNode: options.treeNode,\n\t\tvalidator: options.validator,\n\t\topenAI: options.openAI,\n\t\tprompt: options.prompt,\n\t\tlimiters: options.limiters,\n\t\tplanningStep: options.planningStep,\n\t\tfinalReviewStep: options.finalReviewStep,\n\t\tdebugEventLogHandler: options.debugEventLogHandler,\n\t});\n\n\treturn response;\n}\n"]}
package/dist/aiCollabApi.d.ts
CHANGED
@@ -4,6 +4,52 @@
  */
 import type { TreeNode } from "@fluidframework/tree";
 import type OpenAI from "openai";
+/**
+ * Core Debug event type for the ai-collab
+ * @alpha
+ */
+export interface DebugEvent {
+    /**
+     * The unique id of the debug event.
+     */
+    id: string;
+    /**
+     * An id that will be shared across all debug events that originate from the same single execution of ai-collab.
+     * @remarks This is intended to be used to correlate all debug events that originate from the same execution
+     */
+    traceId: string;
+    /**
+     * The name of the debug event.
+     */
+    eventName: string;
+    /**
+     * The date and time at which the debug event was created.
+     */
+    timestamp: string;
+}
+/**
+ * A Debug event that marks the start or end of a single core logic flow, such as generated tree edits, planning prompt, etc.
+ * @alpha
+ */
+export interface EventFlowDebugEvent extends DebugEvent {
+    /**
+     * The name of the particular event flow.
+     */
+    eventFlowName: string;
+    /**
+     * The status of the particular event flow.
+     */
+    eventFlowStatus: "STARTED" | "COMPLETED" | "IN_PROGRESS";
+    /**
+     * A unique id that will be shared across all debug events that are part of the same event flow.
+     */
+    eventFlowTraceId: string;
+}
+/**
+ * A callback function that can be used to handle debug events that occur during the AI collaboration process.
+ * @alpha
+ */
+export type DebugEventLogHandler = <T extends DebugEvent>(event: T) => unknown;
 /**
  * OpenAI client options for the {@link AiCollabOptions} interface.
  *
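Given the documented `timestamp` and `traceId` fields, one natural handler persists events as NDJSON for later sorting and filtering per run. A Node-only sketch (the file name is arbitrary):

```ts
import { appendFileSync } from "node:fs";
import type { DebugEvent } from "@fluidframework/ai-collab/alpha";

// One JSON object per line; timestamp orders events, traceId filters by run.
const debugEventLogHandler = (event: DebugEvent): void => {
	appendFileSync("ai-collab-debug.ndjson", `${JSON.stringify(event)}\n`);
};
```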
@@ -95,9 +141,9 @@ export interface AiCollabOptions {
      */
     readonly validator?: (newContent: TreeNode) => void;
     /**
-     *
+     * An optional handler for debug events that occur during the AI collaboration.
      */
-    readonly
+    readonly debugEventLogHandler?: DebugEventLogHandler;
 }
 /**
  * A successful response from the AI collaboration.
@@ -133,8 +179,9 @@ export interface AiCollabErrorResponse {
      * - 'tooManyErrors' indicates that the LLM made too many errors in a row
      * - 'tooManyModelCalls' indicates that the LLM made too many model calls
      * - 'aborted' indicates that the AI collaboration was aborted by the user or a limiter
+     * - 'unexpectedError' indicates that an unexpected error occured
      */
-    readonly errorMessage: "tokenLimitExceeded" | "tooManyErrors" | "tooManyModelCalls" | "aborted";
+    readonly errorMessage: "tokenLimitExceeded" | "tooManyErrors" | "tooManyModelCalls" | "aborted" | "unexpectedError";
     /**
      * {@inheritDoc TokenUsage}
      */
package/dist/aiCollabApi.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"aiCollabApi.d.ts","sourceRoot":"","sources":["../src/aiCollabApi.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,sBAAsB,CAAC;AAErD,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AAEjC;;;;GAIG;AACH,MAAM,WAAW,mBAAmB;IACnC;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;CACnB;AAED;;;;GAIG;AACH,MAAM,WAAW,eAAe;IAC/B;;OAEG;IACH,QAAQ,CAAC,MAAM,EAAE,mBAAmB,CAAC;IACrC;;;;;;OAMG;IACH,QAAQ,CAAC,QAAQ,EAAE,QAAQ,CAAC;IAC5B;;OAEG;IACH,QAAQ,CAAC,MAAM,EAAE;QAChB;;;WAGG;QACH,QAAQ,CAAC,iBAAiB,EAAE,MAAM,CAAC;QACnC;;WAEG;QACH,QAAQ,CAAC,OAAO,EAAE,MAAM,CAAC;KACzB,CAAC;IACF;;OAEG;IACH,QAAQ,CAAC,QAAQ,CAAC,EAAE;QACnB;;WAEG;QACH,QAAQ,CAAC,eAAe,CAAC,EAAE,eAAe,CAAC;QAC3C;;;;WAIG;QACH,QAAQ,CAAC,mBAAmB,CAAC,EAAE,MAAM,CAAC;QACtC;;;;WAIG;QACH,QAAQ,CAAC,aAAa,CAAC,EAAE,MAAM,CAAC;QAChC;;;;;WAKG;QACH,QAAQ,CAAC,WAAW,CAAC,EAAE,WAAW,CAAC;KACnC,CAAC;IACF;;;;OAIG;IACH,QAAQ,CAAC,YAAY,CAAC,EAAE,OAAO,CAAC;IAChC;;;OAGG;IACH,QAAQ,CAAC,eAAe,CAAC,EAAE,OAAO,CAAC;IACnC;;OAEG;IACH,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC,UAAU,EAAE,QAAQ,KAAK,IAAI,CAAC;IACpD;;OAEG;IACH,QAAQ,CAAC,
+
{"version":3,"file":"aiCollabApi.d.ts","sourceRoot":"","sources":["../src/aiCollabApi.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,sBAAsB,CAAC;AAErD,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AAEjC;;;GAGG;AACH,MAAM,WAAW,UAAU;IAC1B;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IACX;;;OAGG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,SAAS,EAAE,MAAM,CAAC;IAClB;;OAEG;IACH,SAAS,EAAE,MAAM,CAAC;CAClB;AAED;;;GAGG;AACH,MAAM,WAAW,mBAAoB,SAAQ,UAAU;IACtD;;OAEG;IACH,aAAa,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,eAAe,EAAE,SAAS,GAAG,WAAW,GAAG,aAAa,CAAC;IACzD;;OAEG;IACH,gBAAgB,EAAE,MAAM,CAAC;CACzB;AAED;;;GAGG;AACH,MAAM,MAAM,oBAAoB,GAAG,CAAC,CAAC,SAAS,UAAU,EAAE,KAAK,EAAE,CAAC,KAAK,OAAO,CAAC;AAE/E;;;;GAIG;AACH,MAAM,WAAW,mBAAmB;IACnC;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;CACnB;AAED;;;;GAIG;AACH,MAAM,WAAW,eAAe;IAC/B;;OAEG;IACH,QAAQ,CAAC,MAAM,EAAE,mBAAmB,CAAC;IACrC;;;;;;OAMG;IACH,QAAQ,CAAC,QAAQ,EAAE,QAAQ,CAAC;IAC5B;;OAEG;IACH,QAAQ,CAAC,MAAM,EAAE;QAChB;;;WAGG;QACH,QAAQ,CAAC,iBAAiB,EAAE,MAAM,CAAC;QACnC;;WAEG;QACH,QAAQ,CAAC,OAAO,EAAE,MAAM,CAAC;KACzB,CAAC;IACF;;OAEG;IACH,QAAQ,CAAC,QAAQ,CAAC,EAAE;QACnB;;WAEG;QACH,QAAQ,CAAC,eAAe,CAAC,EAAE,eAAe,CAAC;QAC3C;;;;WAIG;QACH,QAAQ,CAAC,mBAAmB,CAAC,EAAE,MAAM,CAAC;QACtC;;;;WAIG;QACH,QAAQ,CAAC,aAAa,CAAC,EAAE,MAAM,CAAC;QAChC;;;;;WAKG;QACH,QAAQ,CAAC,WAAW,CAAC,EAAE,WAAW,CAAC;KACnC,CAAC;IACF;;;;OAIG;IACH,QAAQ,CAAC,YAAY,CAAC,EAAE,OAAO,CAAC;IAChC;;;OAGG;IACH,QAAQ,CAAC,eAAe,CAAC,EAAE,OAAO,CAAC;IACnC;;OAEG;IACH,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC,UAAU,EAAE,QAAQ,KAAK,IAAI,CAAC;IACpD;;OAEG;IACH,QAAQ,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC;CACrD;AAED;;;;GAIG;AACH,MAAM,WAAW,uBAAuB;IACvC;;;OAGG;IACH,QAAQ,CAAC,MAAM,EAAE,SAAS,CAAC;IAC3B;;OAEG;IACH,QAAQ,CAAC,UAAU,EAAE,UAAU,CAAC;CAChC;AAED;;;;GAIG;AACH,MAAM,WAAW,qBAAqB;IACrC;;;;OAIG;IACH,QAAQ,CAAC,MAAM,EAAE,SAAS,GAAG,iBAAiB,CAAC;IAC/C;;;;;;;OAOG;IACH,QAAQ,CAAC,YAAY,EAClB,oBAAoB,GACpB,eAAe,GACf,mBAAmB,GACnB,SAAS,GACT,iBAAiB,CAAC;IACrB;;OAEG;IACH,QAAQ,CAAC,UAAU,EAAE,UAAU,CAAC;CAChC;AAED;;;;GAIG;AACH,MAAM,WAAW,UAAU;IAC1B;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,YAAY,EAAE,MAAM,CAAC;CACrB;AAED;;;;GAIG;AACH,MAAM,WAAW,WAAW;IAC3B;;OAEG;IACH,QAAQ,CAAC,WAAW,CAAC,EAAE,MAAM,CAAC;IAC9B;;OAEG;IACH,QAAQ,CAAC,YAAY,CAAC,EAAE,MAAM,CAAC;CAC/B"}
package/dist/aiCollabApi.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"aiCollabApi.js","sourceRoot":"","sources":["../src/aiCollabApi.ts"],"names":[],"mappings":";AAAA;;;GAGG","sourcesContent":["/*!\n * Copyright (c) Microsoft Corporation and contributors. All rights reserved.\n * Licensed under the MIT License.\n */\n\nimport type { TreeNode } from \"@fluidframework/tree\";\n// eslint-disable-next-line import/no-named-as-default\nimport type OpenAI from \"openai\";\n\n/**\n * OpenAI client options for the {@link AiCollabOptions} interface.\n *\n * @alpha\n */\nexport interface OpenAiClientOptions {\n\t/**\n\t * The OpenAI client to use for the AI collaboration.\n\t */\n\tclient: OpenAI;\n\t/**\n\t * The name of the target OpenAI model to use for the AI collaboration.\n\t */\n\tmodelName?: string;\n}\n\n/**\n * Options for the AI collaboration.\n *\n * @alpha\n */\nexport interface AiCollabOptions {\n\t/**\n\t * The OpenAI client options to use for the LLM based AI collaboration.\n\t */\n\treadonly openAI: OpenAiClientOptions;\n\t/**\n\t * The specific tree node you want the AI to collaborate on. Pass the root node of your tree if you intend\n\t * for the AI to work on the entire tree.\n\t * @remarks\n\t * - Optional root nodes are not supported\n\t * - Primitive root nodes are not supported\n\t */\n\treadonly treeNode: TreeNode;\n\t/**\n\t * The prompt context to give the LLM in order to collaborate with your applications data.\n\t */\n\treadonly prompt: {\n\t\t/**\n\t\t * The context to give the LLM about its role in the collaboration.\n\t\t * @remarks It's highly recommended to give context about your applications data model and the LLM's role in the collaboration.\n\t\t */\n\t\treadonly systemRoleContext: string;\n\t\t/**\n\t\t * The request from the users to the LLM.\n\t\t */\n\t\treadonly userAsk: string;\n\t};\n\t/**\n\t * Limiters are various optional ways to limit this library's usage of the LLM.\n\t */\n\treadonly limiters?: {\n\t\t/**\n\t\t * An optional AbortController that can be used to abort the AI collaboration while it is still in progress.\n\t\t */\n\t\treadonly abortController?: AbortController;\n\t\t/**\n\t\t * The maximum number of sequential errors the LLM can make before aborting the collaboration.\n\t\t * If the maximum number of sequential errors is reached, the AI collaboration will be aborted and return with the errorMessage 'tooManyErrors'.\n\t\t * Leaving this undefined will disable this limiter.\n\t\t */\n\t\treadonly maxSequentialErrors?: number;\n\t\t/**\n\t\t * The maximum number of model calls the LLM can make before aborting the collaboration.\n\t\t * If the maximum number of model calls is reached, the AI collaboration will be aborted and return with the errorMessage 'tooManyModelCalls'.\n\t\t * Leaving this undefined will disable this limiter.\n\t\t */\n\t\treadonly maxModelCalls?: number;\n\t\t/**\n\t\t * The maximum token usage limits for the LLM.\n\t\t * If the LLM exceeds the token limits, the AI collaboration will be aborted and return with the errorMessage 'tokenLimitExceeded'.\n\t\t * This happens after the first model call's token usage is calculated, meaning that the limits set may be exceeded by a certain amount.\n\t\t * Leaving this undefined will disable this limiter.\n\t\t */\n\t\treadonly tokenLimits?: TokenLimits;\n\t};\n\t/**\n\t * When set to true, the LLM will be asked to first produce a plan, based on the user's ask, before generating any changes to your applications data.\n\t * This can help the LLM produce better results.\n\t * When set to false, the LLM will not be asked to 
produce a plan.\n\t */\n\treadonly planningStep?: boolean;\n\t/**\n\t * When set to true, the LLM will be asked to complete a final review of the changes and determine if any additional changes need to be made.\n\t * When set to false, the LLM will not be asked to complete a final review.\n\t */\n\treadonly finalReviewStep?: boolean;\n\t/**\n\t * An optional validator function that can be used to validate the new content produced by the LLM.\n\t */\n\treadonly validator?: (newContent: TreeNode) => void;\n\t/**\n\t *
+
{"version":3,"file":"aiCollabApi.js","sourceRoot":"","sources":["../src/aiCollabApi.ts"],"names":[],"mappings":";AAAA;;;GAGG","sourcesContent":["/*!\n * Copyright (c) Microsoft Corporation and contributors. All rights reserved.\n * Licensed under the MIT License.\n */\n\nimport type { TreeNode } from \"@fluidframework/tree\";\n// eslint-disable-next-line import/no-named-as-default\nimport type OpenAI from \"openai\";\n\n/**\n * Core Debug event type for the ai-collab\n * @alpha\n */\nexport interface DebugEvent {\n\t/**\n\t * The unique id of the debug event.\n\t */\n\tid: string;\n\t/**\n\t * An id that will be shared across all debug events that originate from the same single execution of ai-collab.\n\t * @remarks This is intended to be used to correlate all debug events that originate from the same execution\n\t */\n\ttraceId: string;\n\t/**\n\t * The name of the debug event.\n\t */\n\teventName: string;\n\t/**\n\t * The date and time at which the debug event was created.\n\t */\n\ttimestamp: string;\n}\n\n/**\n * A Debug event that marks the start or end of a single core logic flow, such as generated tree edits, planning prompt, etc.\n * @alpha\n */\nexport interface EventFlowDebugEvent extends DebugEvent {\n\t/**\n\t * The name of the particular event flow.\n\t */\n\teventFlowName: string;\n\t/**\n\t * The status of the particular event flow.\n\t */\n\teventFlowStatus: \"STARTED\" | \"COMPLETED\" | \"IN_PROGRESS\";\n\t/**\n\t * A unique id that will be shared across all debug events that are part of the same event flow.\n\t */\n\teventFlowTraceId: string;\n}\n\n/**\n * A callback function that can be used to handle debug events that occur during the AI collaboration process.\n * @alpha\n */\nexport type DebugEventLogHandler = <T extends DebugEvent>(event: T) => unknown;\n\n/**\n * OpenAI client options for the {@link AiCollabOptions} interface.\n *\n * @alpha\n */\nexport interface OpenAiClientOptions {\n\t/**\n\t * The OpenAI client to use for the AI collaboration.\n\t */\n\tclient: OpenAI;\n\t/**\n\t * The name of the target OpenAI model to use for the AI collaboration.\n\t */\n\tmodelName?: string;\n}\n\n/**\n * Options for the AI collaboration.\n *\n * @alpha\n */\nexport interface AiCollabOptions {\n\t/**\n\t * The OpenAI client options to use for the LLM based AI collaboration.\n\t */\n\treadonly openAI: OpenAiClientOptions;\n\t/**\n\t * The specific tree node you want the AI to collaborate on. 
Pass the root node of your tree if you intend\n\t * for the AI to work on the entire tree.\n\t * @remarks\n\t * - Optional root nodes are not supported\n\t * - Primitive root nodes are not supported\n\t */\n\treadonly treeNode: TreeNode;\n\t/**\n\t * The prompt context to give the LLM in order to collaborate with your applications data.\n\t */\n\treadonly prompt: {\n\t\t/**\n\t\t * The context to give the LLM about its role in the collaboration.\n\t\t * @remarks It's highly recommended to give context about your applications data model and the LLM's role in the collaboration.\n\t\t */\n\t\treadonly systemRoleContext: string;\n\t\t/**\n\t\t * The request from the users to the LLM.\n\t\t */\n\t\treadonly userAsk: string;\n\t};\n\t/**\n\t * Limiters are various optional ways to limit this library's usage of the LLM.\n\t */\n\treadonly limiters?: {\n\t\t/**\n\t\t * An optional AbortController that can be used to abort the AI collaboration while it is still in progress.\n\t\t */\n\t\treadonly abortController?: AbortController;\n\t\t/**\n\t\t * The maximum number of sequential errors the LLM can make before aborting the collaboration.\n\t\t * If the maximum number of sequential errors is reached, the AI collaboration will be aborted and return with the errorMessage 'tooManyErrors'.\n\t\t * Leaving this undefined will disable this limiter.\n\t\t */\n\t\treadonly maxSequentialErrors?: number;\n\t\t/**\n\t\t * The maximum number of model calls the LLM can make before aborting the collaboration.\n\t\t * If the maximum number of model calls is reached, the AI collaboration will be aborted and return with the errorMessage 'tooManyModelCalls'.\n\t\t * Leaving this undefined will disable this limiter.\n\t\t */\n\t\treadonly maxModelCalls?: number;\n\t\t/**\n\t\t * The maximum token usage limits for the LLM.\n\t\t * If the LLM exceeds the token limits, the AI collaboration will be aborted and return with the errorMessage 'tokenLimitExceeded'.\n\t\t * This happens after the first model call's token usage is calculated, meaning that the limits set may be exceeded by a certain amount.\n\t\t * Leaving this undefined will disable this limiter.\n\t\t */\n\t\treadonly tokenLimits?: TokenLimits;\n\t};\n\t/**\n\t * When set to true, the LLM will be asked to first produce a plan, based on the user's ask, before generating any changes to your applications data.\n\t * This can help the LLM produce better results.\n\t * When set to false, the LLM will not be asked to produce a plan.\n\t */\n\treadonly planningStep?: boolean;\n\t/**\n\t * When set to true, the LLM will be asked to complete a final review of the changes and determine if any additional changes need to be made.\n\t * When set to false, the LLM will not be asked to complete a final review.\n\t */\n\treadonly finalReviewStep?: boolean;\n\t/**\n\t * An optional validator function that can be used to validate the new content produced by the LLM.\n\t */\n\treadonly validator?: (newContent: TreeNode) => void;\n\t/**\n\t * An optional handler for debug events that occur during the AI collaboration.\n\t */\n\treadonly debugEventLogHandler?: DebugEventLogHandler;\n}\n\n/**\n * A successful response from the AI collaboration.\n *\n * @alpha\n */\nexport interface AiCollabSuccessResponse {\n\t/**\n\t * The status of the Ai Collaboration.\n\t * A 'success' status indicates that the AI collaboration was successful at creating changes.\n\t */\n\treadonly status: \"success\";\n\t/**\n\t * {@inheritDoc TokenUsage}\n\t */\n\treadonly tokensUsed: TokenUsage;\n}\n\n/**\n * 
An error response from the AI collaboration.\n *\n * @alpha\n */\nexport interface AiCollabErrorResponse {\n\t/**\n\t * The status of the Ai Collaboration.\n\t * - A 'partial-failure' status indicates that the AI collaboration was partially successful, but was aborted due to a limiter or other error\n\t * - A \"failure\" status indicates that the AI collaboration was not successful at creating any changes.\n\t */\n\treadonly status: \"failure\" | \"partial-failure\";\n\t/**\n\t * The type of known error that occured\n\t * - 'tokenLimitExceeded' indicates that the LLM exceeded the token limits set by the user\n\t * - 'tooManyErrors' indicates that the LLM made too many errors in a row\n\t * - 'tooManyModelCalls' indicates that the LLM made too many model calls\n\t * - 'aborted' indicates that the AI collaboration was aborted by the user or a limiter\n\t * - 'unexpectedError' indicates that an unexpected error occured\n\t */\n\treadonly errorMessage:\n\t\t| \"tokenLimitExceeded\"\n\t\t| \"tooManyErrors\"\n\t\t| \"tooManyModelCalls\"\n\t\t| \"aborted\"\n\t\t| \"unexpectedError\";\n\t/**\n\t * {@inheritDoc TokenUsage}\n\t */\n\treadonly tokensUsed: TokenUsage;\n}\n\n/**\n * Total usage of tokens by an LLM.\n *\n * @alpha\n */\nexport interface TokenUsage {\n\t/**\n\t * The total number of tokens used by the LLM for input.\n\t */\n\tinputTokens: number;\n\t/**\n\t * The total number of tokens used by the LLM for output.\n\t */\n\toutputTokens: number;\n}\n\n/**\n * Maximum limits for the total tokens that can be used by an llm\n *\n * @alpha\n */\nexport interface TokenLimits {\n\t/**\n\t * The maximum number of tokens that can be used by the LLM for input.\n\t */\n\treadonly inputTokens?: number;\n\t/**\n\t * The maximum number of tokens that can be used by the LLM for output.\n\t */\n\treadonly outputTokens?: number;\n}\n"]}
package/dist/alpha.d.ts
CHANGED
@@ -22,14 +22,31 @@ export {
     AiCollabErrorResponse,
     AiCollabOptions,
     AiCollabSuccessResponse,
+    ApplyEditFailure,
+    ApplyEditSuccess,
+    CoreEventLoopCompleted,
+    CoreEventLoopStarted,
+    DebugEvent,
+    DebugEventLogHandler,
     Difference,
     DifferenceChange,
     DifferenceCreate,
     DifferenceMove,
     DifferenceRemove,
+    EventFlowDebugEvent,
+    EventFlowDebugName,
+    EventFlowDebugNames,
+    FinalReviewCompleted,
+    FinalReviewStarted,
+    GenerateTreeEditCompleted,
+    GenerateTreeEditStarted,
+    LlmApiCallDebugEvent,
+    LlmTreeEdit,
     ObjectPath,
     OpenAiClientOptions,
     Options,
+    PlanningPromptCompleted,
+    PlanningPromptStarted,
     SharedTreeBranchManager,
     TokenLimits,
     TokenUsage,