@augmentcode/auggie-sdk 0.1.10 → 0.1.12
This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- package/README.md +56 -1
- package/dist/auggie/sdk-acp-client.d.ts +7 -6
- package/dist/auggie/sdk-acp-client.js +299 -332
- package/dist/auggie/sdk-mcp-server.d.ts +5 -3
- package/dist/auggie/sdk-mcp-server.js +102 -112
- package/dist/context/direct-context.d.ts +82 -22
- package/dist/context/direct-context.js +675 -562
- package/dist/context/filesystem-context.d.ts +5 -3
- package/dist/context/filesystem-context.js +187 -209
- package/dist/context/internal/__mocks__/api-client.d.ts +17 -11
- package/dist/context/internal/__mocks__/api-client.js +104 -91
- package/dist/context/internal/api-client.d.ts +14 -11
- package/dist/context/internal/api-client.js +234 -239
- package/dist/context/internal/blob-name-calculator.d.ts +5 -4
- package/dist/context/internal/blob-name-calculator.js +41 -38
- package/dist/context/internal/chat-utils.d.ts +6 -3
- package/dist/context/internal/chat-utils.js +5 -18
- package/dist/context/internal/credentials.d.ts +5 -4
- package/dist/context/internal/credentials.js +24 -38
- package/dist/context/internal/retry-utils.d.ts +5 -4
- package/dist/context/internal/retry-utils.js +60 -114
- package/dist/context/internal/search-utils.d.ts +3 -2
- package/dist/context/internal/search-utils.js +8 -9
- package/dist/context/internal/session-reader.d.ts +4 -3
- package/dist/context/internal/session-reader.js +14 -22
- package/dist/context/types.d.ts +132 -13
- package/dist/context/types.js +0 -5
- package/dist/index.d.ts +8 -7
- package/dist/index.js +14 -9
- package/dist/version.d.ts +3 -2
- package/dist/version.js +24 -38
- package/package.json +3 -2
- package/dist/auggie/sdk-acp-client.d.ts.map +0 -1
- package/dist/auggie/sdk-acp-client.js.map +0 -1
- package/dist/auggie/sdk-mcp-server.d.ts.map +0 -1
- package/dist/auggie/sdk-mcp-server.js.map +0 -1
- package/dist/context/direct-context.d.ts.map +0 -1
- package/dist/context/direct-context.js.map +0 -1
- package/dist/context/filesystem-context.d.ts.map +0 -1
- package/dist/context/filesystem-context.js.map +0 -1
- package/dist/context/internal/__mocks__/api-client.d.ts.map +0 -1
- package/dist/context/internal/__mocks__/api-client.js.map +0 -1
- package/dist/context/internal/api-client.d.ts.map +0 -1
- package/dist/context/internal/api-client.js.map +0 -1
- package/dist/context/internal/blob-name-calculator.d.ts.map +0 -1
- package/dist/context/internal/blob-name-calculator.js.map +0 -1
- package/dist/context/internal/chat-utils.d.ts.map +0 -1
- package/dist/context/internal/chat-utils.js.map +0 -1
- package/dist/context/internal/credentials.d.ts.map +0 -1
- package/dist/context/internal/credentials.js.map +0 -1
- package/dist/context/internal/retry-utils.d.ts.map +0 -1
- package/dist/context/internal/retry-utils.js.map +0 -1
- package/dist/context/internal/search-utils.d.ts.map +0 -1
- package/dist/context/internal/search-utils.js.map +0 -1
- package/dist/context/internal/session-reader.d.ts.map +0 -1
- package/dist/context/internal/session-reader.js.map +0 -1
- package/dist/context/types.d.ts.map +0 -1
- package/dist/context/types.js.map +0 -1
- package/dist/index.d.ts.map +0 -1
- package/dist/index.js.map +0 -1
- package/dist/version.d.ts.map +0 -1
- package/dist/version.js.map +0 -1

package/dist/context/internal/api-client.js
@@ -1,252 +1,247 @@
-/**
- * API client for Context operations
- * Handles both indexing endpoints and LLM chat endpoint
- */
 import { v4 as uuidv4 } from "uuid";
-import { getSDKVersion } from "../../version";
-import { retryChat, retryWithBackoff } from "./retry-utils";
-[… old lines 8-16 not captured in the extracted diff …]
-    this.statusText = statusText;
-  }
+import { getSDKVersion } from "../../version.js";
+import { retryChat, retryWithBackoff } from "./retry-utils.js";
+class APIError extends Error {
+  status;
+  statusText;
+  constructor(status, statusText, message) {
+    super(message);
+    this.name = "APIError";
+    this.status = status;
+    this.statusText = statusText;
+  }
 }
-[… old lines 20-21 not captured …]
- */
-export function getUserAgent() {
-  return `augment.sdk.context/${getSDKVersion()} (typescript)`;
+function getUserAgent() {
+  return `augment.sdk.context/${getSDKVersion()} (typescript)`;
 }
-[… old lines 26-39 not captured …]
+class ContextAPIClient {
+  apiKey;
+  apiUrl;
+  sessionId;
+  debug;
+  constructor(options) {
+    this.apiKey = options.apiKey;
+    this.apiUrl = options.apiUrl;
+    this.sessionId = uuidv4();
+    this.debug = options.debug ?? false;
+  }
+  log(message) {
+    if (this.debug) {
+      console.log(`[ContextAPI] ${message}`);
     }
-[… old lines 41-42 not captured …]
+  }
+  createRequestId() {
+    return uuidv4();
+  }
+  async callApi(endpoint, payload, requestId) {
+    const baseUrl = this.apiUrl.endsWith("/") ? this.apiUrl.slice(0, -1) : this.apiUrl;
+    const url = `${baseUrl}/${endpoint}`;
+    this.log(`POST ${url}`);
+    this.log(`Request ID: ${requestId}`);
+    this.log(`Request: ${JSON.stringify(payload, null, 2)}`);
+    const response = await fetch(url, {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json",
+        Authorization: `Bearer ${this.apiKey}`,
+        "X-Request-Session-Id": this.sessionId,
+        "X-Request-Id": requestId,
+        "User-Agent": getUserAgent()
+      },
+      body: JSON.stringify(payload)
+    });
+    if (!response.ok) {
+      const errorText = await response.text();
+      throw new APIError(
+        response.status,
+        response.statusText,
+        `API request failed: ${response.status} ${response.statusText} - ${errorText}`
+      );
     }
-[… old lines 44-66 not captured …]
+    const json = await response.json();
+    this.log(`Response: ${JSON.stringify(json, null, 2)}`);
+    return json;
+  }
+  /**
+   * Call API with automatic retry logic and stable request ID across retries
+   */
+  async callApiWithRetry(endpoint, payload) {
+    const requestId = this.createRequestId();
+    return await retryWithBackoff(
+      () => this.callApi(endpoint, payload, requestId),
+      this.debug
+    );
+  }
+  async findMissing(blobNames) {
+    const result = await this.callApiWithRetry("find-missing", {
+      mem_object_names: blobNames
+    });
+    return {
+      unknownBlobNames: result.unknown_memory_names || [],
+      nonindexedBlobNames: result.nonindexed_blob_names || []
+    };
+  }
+  async batchUpload(blobs) {
+    const result = await this.callApiWithRetry(
+      "batch-upload",
+      {
+        blobs: blobs.map((blob) => ({
+          blob_name: blob.blobName,
+          path: blob.pathName,
+          content: blob.text
+        }))
+      }
+    );
+    return {
+      blobNames: result.blob_names || []
+    };
+  }
+  async checkpointBlobs(blobs) {
+    const result = await this.callApiWithRetry(
+      "checkpoint-blobs",
+      {
+        blobs: {
+          checkpoint_id: blobs.checkpointId ?? null,
+          added_blobs: blobs.addedBlobs,
+          deleted_blobs: blobs.deletedBlobs
         }
-[… old lines 68-77 not captured …]
+      }
+    );
+    return {
+      newCheckpointId: result.new_checkpoint_id
+    };
+  }
+  async agentCodebaseRetrieval(query, blobs, maxOutputLength) {
+    const requestBody = {
+      information_request: query,
+      blobs: {
+        checkpoint_id: blobs.checkpointId ?? null,
+        added_blobs: blobs.addedBlobs,
+        deleted_blobs: blobs.deletedBlobs
+      },
+      dialog: []
+    };
+    if (maxOutputLength !== void 0) {
+      requestBody.max_output_length = maxOutputLength;
    }
-[… old lines 79-97 not captured …]
-    };
-[… old lines 99-113 not captured …]
-      information_request: query,
-      blobs: {
-        checkpoint_id: blobs.checkpointId ?? null,
-        added_blobs: blobs.addedBlobs,
-        deleted_blobs: blobs.deletedBlobs,
-      },
-      dialog: [],
-    };
-    if (maxOutputLength !== undefined) {
-      requestBody.max_output_length = maxOutputLength;
-    }
-    const result = await this.callApiWithRetry("agents/codebase-retrieval", requestBody);
-    return {
-      formattedRetrieval: result.formatted_retrieval,
-    };
-  }
-  /**
-   * Parse streaming response and accumulate text chunks
-   * Uses a buffer to handle JSON objects that span multiple lines or chunks
-   */
-  async parseSSEStream(body) {
-    const reader = body.getReader();
-    const decoder = new TextDecoder();
-    let accumulatedText = "";
-    let textBuffer = "";
-    try {
-      while (true) {
-        const { done, value } = await reader.read();
-        if (done)
-          break;
-        // Accumulate decoded text into buffer
-        textBuffer += decoder.decode(value, { stream: true });
-        // Parse Newline Delimited JSON - only process complete lines
-        while (textBuffer.includes("\n")) {
-          const newLineIndex = textBuffer.indexOf("\n");
-          const line = textBuffer.substring(0, newLineIndex);
-          textBuffer = textBuffer.substring(newLineIndex + 1);
-          // Parse the complete line
-          const trimmed = line.trim();
-          if (trimmed) {
-            try {
-              const parsed = JSON.parse(trimmed);
-              if (parsed.text) {
-                accumulatedText += parsed.text;
-              }
-            }
-            catch (e) {
-              // Log parsing errors - these indicate malformed responses from the backend
-              // With buffering, we should only see errors for genuinely invalid JSON
-              this.log(`JSON parse failed for line: ${trimmed}`);
-            }
-          }
-        }
-      }
-      // Flush the decoder to handle any pending multi-byte characters
-      const finalChunk = decoder.decode();
-      if (finalChunk) {
-        textBuffer += finalChunk;
-      }
-      // Process any remaining data in buffer (in case stream doesn't end with newline)
-      if (textBuffer.trim()) {
-        try {
-          const parsed = JSON.parse(textBuffer.trim());
-          if (parsed.text) {
-            accumulatedText += parsed.text;
-          }
-        }
-        catch (e) {
-          this.log(`JSON parse failed for remaining buffer: ${textBuffer.trim()}`);
-        }
+    const result = await this.callApiWithRetry(
+      "agents/codebase-retrieval",
+      requestBody
+    );
+    return {
+      formattedRetrieval: result.formatted_retrieval
+    };
+  }
+  /**
+   * Parse streaming response and accumulate text chunks
+   * Uses a buffer to handle JSON objects that span multiple lines or chunks
+   */
+  async parseSSEStream(body) {
+    const reader = body.getReader();
+    const decoder = new TextDecoder();
+    let accumulatedText = "";
+    let textBuffer = "";
+    try {
+      while (true) {
+        const { done, value } = await reader.read();
+        if (done) break;
+        textBuffer += decoder.decode(value, { stream: true });
+        while (textBuffer.includes("\n")) {
+          const newLineIndex = textBuffer.indexOf("\n");
+          const line = textBuffer.substring(0, newLineIndex);
+          textBuffer = textBuffer.substring(newLineIndex + 1);
+          const trimmed = line.trim();
+          if (trimmed) {
+            try {
+              const parsed = JSON.parse(trimmed);
+              if (parsed.text) {
+                accumulatedText += parsed.text;
+              }
+            } catch (e) {
+              this.log(`JSON parse failed for line: ${trimmed}`);
             }
+          }
         }
-[… old lines 186-187 not captured …]
+      }
+      const finalChunk = decoder.decode();
+      if (finalChunk) {
+        textBuffer += finalChunk;
+      }
+      if (textBuffer.trim()) {
+        try {
+          const parsed = JSON.parse(textBuffer.trim());
+          if (parsed.text) {
+            accumulatedText += parsed.text;
+          }
+        } catch (e) {
+          this.log(`JSON parse failed for remaining buffer: ${textBuffer.trim()}`);
         }
-[… old line 189 not captured …]
+      }
+    } finally {
+      reader.releaseLock();
     }
-[… old lines 191-213 not captured …]
-          id: 0,
-          type: 0, // ChatRequestNodeType.TEXT = 0
-          text_node: {
-            content: prompt,
-          },
-        },
-      ],
-      chat_history: [],
-      conversation_id: this.sessionId,
-    };
-    this.log(`Request: ${JSON.stringify(payload, null, 2)}`);
-    const response = await fetch(url, {
-      method: "POST",
-      headers: {
-        "Content-Type": "application/json",
-        Authorization: `Bearer ${this.apiKey}`,
-        "X-Request-Session-Id": this.sessionId,
-        "X-Request-Id": requestId,
-        "conversation-id": this.sessionId,
-        "X-Mode": "sdk",
-        "User-Agent": getUserAgent(),
-      },
-      body: JSON.stringify(payload),
-    });
-    if (!response.ok) {
-      const errorText = await response.text();
-      throw new APIError(response.status, response.statusText, `API request failed: ${response.status} ${response.statusText} - ${errorText}`);
-    }
-    // Handle streaming response
-    if (!response.body) {
-      throw new Error("Response body is null");
+    return accumulatedText;
+  }
+  /**
+   * Call the LLM chat streaming API with a formatted prompt
+   *
+   * @param prompt - The formatted prompt to send to the LLM
+   * @returns The LLM's response text
+   */
+  async chat(prompt) {
+    const requestId = this.createRequestId();
+    return await retryChat(async () => {
+      const baseUrl = this.apiUrl.endsWith("/") ? this.apiUrl.slice(0, -1) : this.apiUrl;
+      const url = `${baseUrl}/chat-stream`;
+      this.log(`POST ${url}`);
+      this.log(`Request ID: ${requestId}`);
+      const payload = {
+        nodes: [
+          {
+            id: 0,
+            type: 0,
+            // ChatRequestNodeType.TEXT = 0
+            text_node: {
+              content: prompt
            }
-[… old lines 246-250 not captured …]
+          }
+        ],
+        chat_history: [],
+        conversation_id: this.sessionId
+      };
+      this.log(`Request: ${JSON.stringify(payload, null, 2)}`);
+      const response = await fetch(url, {
+        method: "POST",
+        headers: {
+          "Content-Type": "application/json",
+          Authorization: `Bearer ${this.apiKey}`,
+          "X-Request-Session-Id": this.sessionId,
+          "X-Request-Id": requestId,
+          "conversation-id": this.sessionId,
+          "X-Mode": "sdk",
+          "User-Agent": getUserAgent()
+        },
+        body: JSON.stringify(payload)
+      });
+      if (!response.ok) {
+        const errorText = await response.text();
+        throw new APIError(
+          response.status,
+          response.statusText,
+          `API request failed: ${response.status} ${response.statusText} - ${errorText}`
+        );
+      }
+      if (!response.body) {
+        throw new Error("Response body is null");
+      }
+      const accumulatedText = await this.parseSSEStream(response.body);
+      this.log(`Response: ${accumulatedText}`);
+      return accumulatedText;
+    }, this.debug);
+  }
 }
-[… old line 252 not captured …]
+export {
+  APIError,
+  ContextAPIClient,
+  getUserAgent
+};
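
The rewritten `callApiWithRetry` above creates the request ID once and reuses it for every retry attempt, so the backend sees the same `X-Request-Id` header across retries of one logical call. Below is a minimal, hypothetical sketch of that pattern; `retryWithBackoffSketch` and `postJson` are illustrative stand-ins, not the SDK's `retryWithBackoff` from retry-utils.js.

```ts
import { randomUUID } from "node:crypto";

// Generic stand-in for a backoff helper: retries the supplied thunk with
// exponential delays. Not the SDK's implementation.
async function retryWithBackoffSketch<T>(
  fn: () => Promise<T>,
  maxAttempts = 3,
  baseDelayMs = 500
): Promise<T> {
  let lastError: unknown;
  for (let attempt = 0; attempt < maxAttempts; attempt++) {
    try {
      return await fn();
    } catch (err) {
      lastError = err;
      if (attempt === maxAttempts - 1) break;
      // Wait 500 ms, 1000 ms, 2000 ms, ... between attempts.
      await new Promise((resolve) => setTimeout(resolve, baseDelayMs * 2 ** attempt));
    }
  }
  throw lastError;
}

// Hypothetical transport standing in for ContextAPIClient.callApi.
declare function postJson(endpoint: string, payload: unknown, requestId: string): Promise<unknown>;

// The request ID is generated once, outside the retried closure, so every
// attempt of the same logical call carries the same X-Request-Id.
async function callWithStableRequestId(endpoint: string, payload: unknown): Promise<unknown> {
  const requestId = randomUUID();
  return retryWithBackoffSketch(() => postJson(endpoint, payload, requestId));
}
```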

package/dist/context/internal/blob-name-calculator.d.ts
@@ -1,8 +1,8 @@
-[… old lines 1-2 not captured …]
+declare const blobNamingVersion = 2023102300;
+declare class BlobTooLargeError extends Error {
     constructor(maxBlobSize: number);
 }
-[… old line 5 not captured …]
+declare class BlobNameCalculator {
     private readonly _textEncoder;
     readonly maxBlobSize: number;
     constructor(maxBlobSize: number);
@@ -11,4 +11,5 @@ export declare class BlobNameCalculator {
     calculate(path: string, contents: string | Uint8Array): string | undefined;
     calculateNoThrow(path: string, contents: string | Uint8Array): string;
 }
-[… old line 14 not captured …]
+
+export { BlobNameCalculator, BlobTooLargeError, blobNamingVersion };

package/dist/context/internal/blob-name-calculator.js
@@ -1,44 +1,47 @@
 import { createHash } from "node:crypto";
-[… old lines 2-6 not captured …]
+const blobNamingVersion = 2023102300;
+class BlobTooLargeError extends Error {
+  constructor(maxBlobSize) {
+    super(`content exceeds maximum size of ${maxBlobSize}`);
+  }
 }
-[… old lines 8-25 not captured …]
-    }
-    if (checkFileSize && contentsBytes.length > this.maxBlobSize) {
-      throw new BlobTooLargeError(this.maxBlobSize);
-    }
-    return this._hash(path, contentsBytes);
+class BlobNameCalculator {
+  _textEncoder = new TextEncoder();
+  maxBlobSize;
+  constructor(maxBlobSize) {
+    this.maxBlobSize = maxBlobSize;
+  }
+  _hash(path, contents) {
+    const hash = createHash("sha256");
+    hash.update(path);
+    hash.update(contents);
+    return hash.digest("hex");
+  }
+  calculateOrThrow(path, contents, checkFileSize = true) {
+    let contentsBytes;
+    if (typeof contents === "string") {
+      contentsBytes = this._textEncoder.encode(contents);
+    } else {
+      contentsBytes = contents;
     }
-[… old lines 32-33 not captured …]
-      return this.calculateOrThrow(path, contents, true);
-    }
-    catch {
-      return;
-    }
+    if (checkFileSize && contentsBytes.length > this.maxBlobSize) {
+      throw new BlobTooLargeError(this.maxBlobSize);
     }
-[… old lines 40-41 not captured …]
+    return this._hash(path, contentsBytes);
+  }
+  calculate(path, contents) {
+    try {
+      return this.calculateOrThrow(path, contents, true);
+    } catch {
+      return;
    }
+  }
+  calculateNoThrow(path, contents) {
+    return this.calculateOrThrow(path, contents, false);
+  }
 }
-[… old line 44 not captured …]
+export {
+  BlobNameCalculator,
+  BlobTooLargeError,
+  blobNamingVersion
+};
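
As the new blob-name-calculator.js above shows, a blob name is the hex SHA-256 digest of the path followed by the contents, and oversized content either throws `BlobTooLargeError`, yields `undefined`, or skips the check, depending on which method is called. A small usage sketch follows; the 1 MiB limit and the dist-internal import path are illustrative choices, not values taken from the SDK.

```ts
import { BlobNameCalculator, BlobTooLargeError } from "./blob-name-calculator.js";

// Example limit; the SDK chooses the real maxBlobSize elsewhere.
const calculator = new BlobNameCalculator(1024 * 1024);

// calculate(): 64-char hex digest, or undefined if the content is too large.
const name = calculator.calculate("src/index.ts", "export const answer = 42;\n");

// calculateNoThrow(): same digest, but the size check is skipped entirely.
const bigName = calculator.calculateNoThrow("assets/blob.bin", new Uint8Array(2 * 1024 * 1024));

// calculateOrThrow(): raises BlobTooLargeError when the content exceeds the limit.
try {
  calculator.calculateOrThrow("assets/blob.bin", new Uint8Array(2 * 1024 * 1024));
} catch (err) {
  if (err instanceof BlobTooLargeError) {
    console.error(err.message); // "content exceeds maximum size of 1048576"
  }
}

console.log(name, bigName);
```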

package/dist/context/internal/chat-utils.d.ts
@@ -1,4 +1,6 @@
-import … [remainder of line not captured]
+import { ContextAPIClient } from './api-client.js';
+import '../types.js';
+
 /**
  * Call the chat API with retry logic for transient failures
  *
@@ -14,5 +16,6 @@ import type { ContextAPIClient } from "./api-client";
  * @param _debug - Whether to log debug messages (unused, kept for compatibility)
  * @returns The LLM's response text
  */
-[… old lines 17-18 not captured …]
+declare function chatWithRetry(apiClient: ContextAPIClient, prompt: string, _debug?: boolean): Promise<string>;
+
+export { chatWithRetry };

package/dist/context/internal/chat-utils.js
@@ -1,19 +1,6 @@
-[… old lines 1-2 not captured …]
- *
- * This function wraps the ContextAPIClient.chat() method with automatic
- * retry logic for transient failures, including chat-specific status codes
- * like 429 (rate limit) and 529 (overloaded).
- *
- * Note: The retry logic is now handled internally by the chat() method,
- * so this function is just a simple wrapper for backwards compatibility.
- *
- * @param apiClient - The API client to use for the chat request
- * @param prompt - The formatted prompt to send to the LLM
- * @param _debug - Whether to log debug messages (unused, kept for compatibility)
- * @returns The LLM's response text
- */
-export async function chatWithRetry(apiClient, prompt, _debug = false) {
-  return await apiClient.chat(prompt);
+async function chatWithRetry(apiClient, prompt, _debug = false) {
+  return await apiClient.chat(prompt);
 }
-[… old line 19 not captured …]
+export {
+  chatWithRetry
+};

package/dist/context/internal/credentials.d.ts
@@ -1,14 +1,14 @@
 /**
  * Options for credential resolution
  */
-[… old line 4 not captured …]
+type CredentialOptions = {
     apiKey?: string;
     apiUrl?: string;
 };
 /**
  * Resolved credentials
  */
-[… old line 11 not captured …]
+type ResolvedCredentials = {
     apiKey: string;
     apiUrl: string;
 };
@@ -24,5 +24,6 @@ export type ResolvedCredentials = {
  * @returns Resolved credentials
  * @throws Error if credentials cannot be resolved
  */
-[… old lines 27-28 not captured …]
+declare function resolveCredentials(options?: CredentialOptions): Promise<ResolvedCredentials>;
+
+export { type CredentialOptions, type ResolvedCredentials, resolveCredentials };
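
Putting the pieces of this diff together: `resolveCredentials` (typed above) produces the `apiKey`/`apiUrl` pair that the `ContextAPIClient` constructor in api-client.js reads. The sketch below is a hedged illustration only; the dist-internal import paths and environment-variable names are assumptions, a consumer would normally go through the package's public entry point, and how `resolveCredentials` locates credentials when no overrides are given is implemented in credentials.js, which this diff does not show.

```ts
import { resolveCredentials } from "./credentials.js";
import { ContextAPIClient } from "./api-client.js";

async function makeClient(): Promise<ContextAPIClient> {
  // Pass explicit overrides from the caller's own environment variables
  // (names chosen for this example); how missing fields are resolved is up
  // to credentials.js and is not shown in this diff.
  const { apiKey, apiUrl } = await resolveCredentials({
    apiKey: process.env.EXAMPLE_AUGMENT_API_KEY,
    apiUrl: process.env.EXAMPLE_AUGMENT_API_URL,
  });
  // Constructor options mirror the fields read in api-client.js: apiKey, apiUrl, debug.
  return new ContextAPIClient({ apiKey, apiUrl, debug: true });
}

makeClient().then((client) => {
  // client.chat(), client.findMissing(), client.batchUpload(), ... as defined above.
  void client;
});
```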