llmasaservice-client 0.2.2 → 0.2.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/index.js +3 -3
- package/dist/index.mjs +3 -3
- package/package.json +1 -1
- package/src/useLLM.ts +6 -7
package/CHANGELOG.md
CHANGED
package/dist/index.js
CHANGED
@@ -161,9 +161,6 @@ var useLLM = (options) => {
       }
       if (done) {
         setIdle(true);
-        if (onCompleteCallback) {
-          onCompleteCallback(result);
-        }
         break;
       }
       result += decoder.decode(value);
@@ -185,6 +182,9 @@ var useLLM = (options) => {
       reader.cancel();
       setIdle(true);
     }
+    if (onCompleteCallback) {
+      onCompleteCallback();
+    }
     return result;
   });
 }
package/dist/index.mjs
CHANGED
@@ -124,9 +124,6 @@ var useLLM = (options) => {
       }
       if (done) {
         setIdle(true);
-        if (onCompleteCallback) {
-          onCompleteCallback(result);
-        }
         break;
       }
       result += decoder.decode(value);
@@ -148,6 +145,9 @@ var useLLM = (options) => {
       reader.cancel();
       setIdle(true);
     }
+    if (onCompleteCallback) {
+      onCompleteCallback();
+    }
     return result;
   });
 }
package/package.json
CHANGED
package/src/useLLM.ts
CHANGED
@@ -46,6 +46,7 @@ export const useLLM = (options?: LLMServiceType): UseLLMReturnType => {
    * @param stream Determines whether to stream results back in the response property as they return from the service or batch them up and return them all at once in the response property as a string.
    * @param abortController The AbortController used to abort this request once its started. This allows you to add a stop button to your UI.
    * @param service The service to use for the request. If null, load balancing will be applied. This is typically only used for testing.
+   * @param onCompleteCallback The callback function to be called once the stream completes, with the final result string.
    * @returns a StreamReader object if stream is true, otherwise a string of the response. Typically this isn't used when streaming, the stream is exposed in the response property.
    */
   async function send(
@@ -54,7 +55,7 @@ export const useLLM = (options?: LLMServiceType): UseLLMReturnType => {
     stream: boolean = true,
     abortController: AbortController = new AbortController(),
     service: string | null = null, // null means use the default service and apply services load balancing
-    onCompleteCallback?:
+    onCompleteCallback?: (result: string) => void
   ): Promise<ReadableStreamDefaultReader<any> | string | undefined> {
     setResponse("");
     setIdle(false);
@@ -144,12 +145,6 @@ export const useLLM = (options?: LLMServiceType): UseLLMReturnType => {
         // If the stream has been read to the end, exit the loop
         if (done) {
           setIdle(true);
-
-          if (onCompleteCallback) {
-            onCompleteCallback(result);
-
-          }
-
           break;
         }
 
@@ -176,6 +171,10 @@ export const useLLM = (options?: LLMServiceType): UseLLMReturnType => {
       setIdle(true);
     }
 
+    if (onCompleteCallback) {
+      onCompleteCallback();
+    }
+
     return result;
   }
 