llmasaservice-client 0.2.5 → 0.3.0

This diff shows the changes between two publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,11 @@
1
1
  # llmasaservice-client
2
2
 
3
+ ## 0.3.0
4
+
5
+ ### Minor Changes
6
+
7
+ - Final onComplete and onError callbacks added to the send method
8
+
3
9
  ## 0.2.5
4
10
 
5
11
  ### Patch Changes
package/dist/index.js CHANGED
@@ -91,7 +91,7 @@ var useLLM = (options) => {
91
91
  setIdle(true);
92
92
  };
93
93
  function send(_0) {
94
- return __async(this, arguments, function* (prompt, messages = [], stream = true, abortController = new AbortController(), service = null, onCompleteCallback) {
94
+ return __async(this, arguments, function* (prompt, messages = [], stream = true, abortController = new AbortController(), service = null, onComplete, onError) {
95
95
  var _a, _b, _c, _d, _e;
96
96
  setResponse("");
97
97
  setIdle(false);
@@ -124,13 +124,27 @@ var useLLM = (options) => {
124
124
  const decoder = new TextDecoder("utf-8");
125
125
  setIdle(false);
126
126
  if (!stream) {
127
- return yield readStream(reader, decoder, stream, {
128
- signal: options2.signal
129
- }, onCompleteCallback);
127
+ return yield readStream(
128
+ reader,
129
+ decoder,
130
+ stream,
131
+ {
132
+ signal: options2.signal
133
+ },
134
+ onComplete,
135
+ onError
136
+ );
130
137
  } else {
131
- readStream(reader, decoder, stream, {
132
- signal: options2.signal
133
- }, onCompleteCallback);
138
+ readStream(
139
+ reader,
140
+ decoder,
141
+ stream,
142
+ {
143
+ signal: options2.signal
144
+ },
145
+ onComplete,
146
+ onError
147
+ );
134
148
  return reader;
135
149
  }
136
150
  }
@@ -139,12 +153,15 @@ var useLLM = (options) => {
139
153
  }
140
154
  if (errorInFetch !== "") {
141
155
  setError(errorInFetch);
156
+ if (onError) {
157
+ onError(errorInFetch);
158
+ }
142
159
  console.error(`Error: Error in fetch. (${errorInFetch})`);
143
160
  }
144
161
  });
145
162
  }
146
163
  function readStream(_0, _1) {
147
- return __async(this, arguments, function* (reader, decoder, stream = true, { signal }, onCompleteCallback) {
164
+ return __async(this, arguments, function* (reader, decoder, stream = true, { signal }, onComplete, onError) {
148
165
  let errorInRead = "";
149
166
  let result = "";
150
167
  while (true) {
@@ -180,12 +197,11 @@ var useLLM = (options) => {
180
197
  if (errorInRead !== "") {
181
198
  setError(errorInRead);
182
199
  reader.cancel();
200
+ if (onError) onError(errorInRead);
183
201
  setIdle(true);
184
202
  }
185
- console.log("about to call callback", onCompleteCallback);
186
- if (onCompleteCallback) {
187
- onCompleteCallback(result);
188
- console.log("called callback");
203
+ if (onComplete) {
204
+ onComplete(result);
189
205
  }
190
206
  return result;
191
207
  });
package/dist/index.mjs CHANGED
@@ -54,7 +54,7 @@ var useLLM = (options) => {
54
54
  setIdle(true);
55
55
  };
56
56
  function send(_0) {
57
- return __async(this, arguments, function* (prompt, messages = [], stream = true, abortController = new AbortController(), service = null, onCompleteCallback) {
57
+ return __async(this, arguments, function* (prompt, messages = [], stream = true, abortController = new AbortController(), service = null, onComplete, onError) {
58
58
  var _a, _b, _c, _d, _e;
59
59
  setResponse("");
60
60
  setIdle(false);
@@ -87,13 +87,27 @@ var useLLM = (options) => {
87
87
  const decoder = new TextDecoder("utf-8");
88
88
  setIdle(false);
89
89
  if (!stream) {
90
- return yield readStream(reader, decoder, stream, {
91
- signal: options2.signal
92
- }, onCompleteCallback);
90
+ return yield readStream(
91
+ reader,
92
+ decoder,
93
+ stream,
94
+ {
95
+ signal: options2.signal
96
+ },
97
+ onComplete,
98
+ onError
99
+ );
93
100
  } else {
94
- readStream(reader, decoder, stream, {
95
- signal: options2.signal
96
- }, onCompleteCallback);
101
+ readStream(
102
+ reader,
103
+ decoder,
104
+ stream,
105
+ {
106
+ signal: options2.signal
107
+ },
108
+ onComplete,
109
+ onError
110
+ );
97
111
  return reader;
98
112
  }
99
113
  }
@@ -102,12 +116,15 @@ var useLLM = (options) => {
102
116
  }
103
117
  if (errorInFetch !== "") {
104
118
  setError(errorInFetch);
119
+ if (onError) {
120
+ onError(errorInFetch);
121
+ }
105
122
  console.error(`Error: Error in fetch. (${errorInFetch})`);
106
123
  }
107
124
  });
108
125
  }
109
126
  function readStream(_0, _1) {
110
- return __async(this, arguments, function* (reader, decoder, stream = true, { signal }, onCompleteCallback) {
127
+ return __async(this, arguments, function* (reader, decoder, stream = true, { signal }, onComplete, onError) {
111
128
  let errorInRead = "";
112
129
  let result = "";
113
130
  while (true) {
@@ -143,12 +160,11 @@ var useLLM = (options) => {
143
160
  if (errorInRead !== "") {
144
161
  setError(errorInRead);
145
162
  reader.cancel();
163
+ if (onError) onError(errorInRead);
146
164
  setIdle(true);
147
165
  }
148
- console.log("about to call callback", onCompleteCallback);
149
- if (onCompleteCallback) {
150
- onCompleteCallback(result);
151
- console.log("called callback");
166
+ if (onComplete) {
167
+ onComplete(result);
152
168
  }
153
169
  return result;
154
170
  });
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "llmasaservice-client",
3
3
  "license": "MIT",
4
- "version": "0.2.5",
4
+ "version": "0.3.0",
5
5
  "main": "dist/index.js",
6
6
  "module": "dist/index.mjs",
7
7
  "types": "dist/index.d.ts",
package/src/useLLM.ts CHANGED
@@ -46,7 +46,8 @@ export const useLLM = (options?: LLMServiceType): UseLLMReturnType => {
46
46
  * @param stream Determines whether to stream results back in the response property as they return from the service or batch them up and return them all at once in the response property as a string.
47
47
  * @param abortController The AbortController used to abort this request once its started. This allows you to add a stop button to your UI.
48
48
  * @param service The service to use for the request. If null, load balancing will be applied. This is typically only used for testing.
49
- * @param onCompleteCallback The callback function to be called once the stream completes, with the final result string.
49
+ * @param onComplete The callback function to be called once the stream completes, with the final result string.
50
+ * @param onError The callback function to be called if an error occurs, with the error string.
50
51
  * @returns a StreamReader object if stream is true, otherwise a string of the response. Typically this isn't used when streaming, the stream is exposed in the response property.
51
52
  */
52
53
  async function send(
@@ -55,7 +56,9 @@ export const useLLM = (options?: LLMServiceType): UseLLMReturnType => {
55
56
  stream: boolean = true,
56
57
  abortController: AbortController = new AbortController(),
57
58
  service: string | null = null, // null means use the default service and apply services load balancing
58
- onCompleteCallback?: (result: string) => void
59
+ onComplete?: (result: string) => void,
60
+ onError?: (error: string) => void
61
+
59
62
  ): Promise<ReadableStreamDefaultReader<any> | string | undefined> {
60
63
  setResponse("");
61
64
  setIdle(false);
@@ -94,13 +97,27 @@ export const useLLM = (options?: LLMServiceType): UseLLMReturnType => {
94
97
  setIdle(false);
95
98
 
96
99
  if (!stream) {
97
- return await readStream(reader, decoder, stream, {
98
- signal: options.signal,
99
- }, onCompleteCallback);
100
+ return await readStream(
101
+ reader,
102
+ decoder,
103
+ stream,
104
+ {
105
+ signal: options.signal,
106
+ },
107
+ onComplete,
108
+ onError
109
+ );
100
110
  } else {
101
- readStream(reader, decoder, stream, {
102
- signal: options.signal,
103
- }, onCompleteCallback);
111
+ readStream(
112
+ reader,
113
+ decoder,
114
+ stream,
115
+ {
116
+ signal: options.signal,
117
+ },
118
+ onComplete,
119
+ onError
120
+ );
104
121
 
105
122
  return reader;
106
123
  }
@@ -111,6 +128,9 @@ export const useLLM = (options?: LLMServiceType): UseLLMReturnType => {
111
128
 
112
129
  if (errorInFetch !== "") {
113
130
  setError(errorInFetch);
131
+ if (onError) {
132
+ onError(errorInFetch);
133
+ }
114
134
  console.error(`Error: Error in fetch. (${errorInFetch})`);
115
135
  }
116
136
  }
@@ -120,7 +140,8 @@ export const useLLM = (options?: LLMServiceType): UseLLMReturnType => {
120
140
  decoder: TextDecoder,
121
141
  stream: Boolean = true,
122
142
  { signal: signal }: { signal: AbortSignal },
123
- onCompleteCallback?: (result: string) => void
143
+ onComplete?: (result: string) => void,
144
+ onError?: (error: string) => void
124
145
  ): Promise<string> {
125
146
  let errorInRead = "";
126
147
  let result = "";
@@ -168,13 +189,12 @@ export const useLLM = (options?: LLMServiceType): UseLLMReturnType => {
168
189
  if (errorInRead !== "") {
169
190
  setError(errorInRead);
170
191
  reader.cancel();
192
+ if (onError) onError(errorInRead);
171
193
  setIdle(true);
172
194
  }
173
195
 
174
- console.log("about to call callback", onCompleteCallback);
175
- if (onCompleteCallback) {
176
- onCompleteCallback(result);
177
- console.log("called callback");
196
+ if (onComplete) {
197
+ onComplete(result);
178
198
  }
179
199
 
180
200
  return result;