llmasaservice-client 0.1.0 → 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +7 -0
- package/package.json +1 -1
- package/src/useLLM.ts +10 -11
package/CHANGELOG.md
CHANGED
package/package.json
CHANGED
package/src/useLLM.ts
CHANGED
```diff
@@ -4,16 +4,18 @@ import { LLMService, LLMServiceType } from "./LLMAsAService";
 export interface UseLLMReturnType {
   send: Function;
   stop: Function;
-  response: string;
+  response: string;
   idle: boolean;
   error: string;
   setResponse: Function;
+  lastCallId: string;
 }

 export const useLLM = (options?: LLMServiceType): UseLLMReturnType => {
   const [response, setResponse] = useState<string>("");
   const [idle, setIdle] = useState<boolean>(true);
   const [error, setError] = useState<string>("");
+  const [lastCallId, setLastCallId] = useState<string>("");

   let context = useContext(LLMService);
   if (!context) {
```
```diff
@@ -28,7 +30,7 @@ export const useLLM = (options?: LLMServiceType): UseLLMReturnType => {

   /**
    * Stops the fetch request and returns the hook to an idle state. Use this to add abort functionality to your UI.
-   *
+   *
   * @param controller An AbortController object to stop the fetch request and return this hook to an idle state, the controller should be the same one passed to the send function.
   */
  const stop = (controller: AbortController | null) => {
```
```diff
@@ -73,8 +75,6 @@ export const useLLM = (options?: LLMServiceType): UseLLMReturnType => {
       mode: "cors" as RequestMode,
       headers: {
         "Content-Type": "text/plain",
-        //"x-Amz-Content-Sha256": sha256.create().update(responseBody).hex(),
-        //"x-Amz-Content-Sha256": "UNSIGNED-PAYLOAD",
       },
       body: responseBody,
     };
```
```diff
@@ -85,17 +85,16 @@ export const useLLM = (options?: LLMServiceType): UseLLMReturnType => {
       if (!response.ok) {
         errorInFetch = `Error: Network error for service. (${response.status} ${response.statusText})`;
       } else {
+        setLastCallId(response.headers.get("x-callId") ?? "");
         const reader =
           response?.body?.getReader() as ReadableStreamDefaultReader;
         const decoder = new TextDecoder("utf-8");
         setIdle(false);

         if (!stream) {
-
-
-
-          })
-        );
+          return await readStream(reader, decoder, stream, {
+            signal: options.signal,
+          });
         } else {
           readStream(reader, decoder, stream, {
             signal: options.signal,
```
```diff
@@ -172,7 +171,7 @@ export const useLLM = (options?: LLMServiceType): UseLLMReturnType => {
     return result;
   }

-  return { response, send, stop, idle, error, setResponse };
+  return { response, send, stop, idle, error, setResponse, lastCallId };
 };

-export default useLLM;
+export default useLLM;
```