llmasaservice-client 0.0.11 → 0.0.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/index.d.mts +2 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +6 -5
- package/dist/index.mjs +6 -5
- package/package.json +1 -1
- package/src/LLMAsAService.tsx +4 -1
- package/src/useLLM.ts +1 -1
package/CHANGELOG.md
CHANGED
package/dist/index.d.mts
CHANGED
|
@@ -9,12 +9,14 @@ type LLMAsAServiceCustomer = {
|
|
|
9
9
|
interface LLMServiceType {
|
|
10
10
|
project_id: string | undefined;
|
|
11
11
|
customer?: LLMAsAServiceCustomer;
|
|
12
|
+
url?: string;
|
|
12
13
|
}
|
|
13
14
|
declare const LLMService: React.Context<LLMServiceType | undefined>;
|
|
14
15
|
interface UserProviderProps {
|
|
15
16
|
children: ReactNode;
|
|
16
17
|
project_id: string | undefined;
|
|
17
18
|
customer?: LLMAsAServiceCustomer;
|
|
19
|
+
url?: string;
|
|
18
20
|
}
|
|
19
21
|
declare const LLMServiceProvider: React.FC<UserProviderProps>;
|
|
20
22
|
|
package/dist/index.d.ts
CHANGED
|
@@ -9,12 +9,14 @@ type LLMAsAServiceCustomer = {
|
|
|
9
9
|
interface LLMServiceType {
|
|
10
10
|
project_id: string | undefined;
|
|
11
11
|
customer?: LLMAsAServiceCustomer;
|
|
12
|
+
url?: string;
|
|
12
13
|
}
|
|
13
14
|
declare const LLMService: React.Context<LLMServiceType | undefined>;
|
|
14
15
|
interface UserProviderProps {
|
|
15
16
|
children: ReactNode;
|
|
16
17
|
project_id: string | undefined;
|
|
17
18
|
customer?: LLMAsAServiceCustomer;
|
|
19
|
+
url?: string;
|
|
18
20
|
}
|
|
19
21
|
declare const LLMServiceProvider: React.FC<UserProviderProps>;
|
|
20
22
|
|
package/dist/index.js
CHANGED
|
@@ -65,9 +65,10 @@ var LLMService = (0, import_react.createContext)(void 0);
|
|
|
65
65
|
var LLMServiceProvider = ({
|
|
66
66
|
children,
|
|
67
67
|
project_id,
|
|
68
|
-
customer
|
|
68
|
+
customer,
|
|
69
|
+
url = "https://chat.llmasaservice.io/"
|
|
69
70
|
}) => {
|
|
70
|
-
return /* @__PURE__ */ import_react.default.createElement(LLMService.Provider, { value: { project_id, customer } }, children);
|
|
71
|
+
return /* @__PURE__ */ import_react.default.createElement(LLMService.Provider, { value: { project_id, customer, url } }, children);
|
|
71
72
|
};
|
|
72
73
|
|
|
73
74
|
// src/useLLM.ts
|
|
@@ -90,11 +91,10 @@ var useLLM = (options) => {
|
|
|
90
91
|
};
|
|
91
92
|
function send(_0) {
|
|
92
93
|
return __async(this, arguments, function* (prompt, messages = [], stream = true, abortController = new AbortController(), service = null) {
|
|
93
|
-
var _a, _b, _c;
|
|
94
|
+
var _a, _b, _c, _d;
|
|
94
95
|
setResponse("");
|
|
95
96
|
setIdle(false);
|
|
96
97
|
let errorInFetch = "";
|
|
97
|
-
const url = "https://chat.llmasaservice.io/";
|
|
98
98
|
const responseBody = JSON.stringify({
|
|
99
99
|
projectId: (_a = context == null ? void 0 : context.project_id) != null ? _a : "",
|
|
100
100
|
serviceId: service,
|
|
@@ -115,11 +115,12 @@ var useLLM = (options) => {
|
|
|
115
115
|
body: responseBody
|
|
116
116
|
};
|
|
117
117
|
try {
|
|
118
|
+
const url = (_c = context == null ? void 0 : context.url) != null ? _c : "https://chat.llmasaservice.io/";
|
|
118
119
|
const response2 = yield fetch(url, options2);
|
|
119
120
|
if (!response2.ok) {
|
|
120
121
|
errorInFetch = `Error: Network error for service. (${response2.status} ${response2.statusText})`;
|
|
121
122
|
} else {
|
|
122
|
-
const reader = (_c = response2 == null ? void 0 : response2.body) == null ? void 0 : _c.getReader();
|
|
123
|
+
const reader = (_d = response2 == null ? void 0 : response2.body) == null ? void 0 : _d.getReader();
|
|
123
124
|
const decoder = new TextDecoder("utf-8");
|
|
124
125
|
setIdle(false);
|
|
125
126
|
if (!stream) {
|
package/dist/index.mjs
CHANGED
|
@@ -28,9 +28,10 @@ var LLMService = createContext(void 0);
|
|
|
28
28
|
var LLMServiceProvider = ({
|
|
29
29
|
children,
|
|
30
30
|
project_id,
|
|
31
|
-
customer
|
|
31
|
+
customer,
|
|
32
|
+
url = "https://chat.llmasaservice.io/"
|
|
32
33
|
}) => {
|
|
33
|
-
return /* @__PURE__ */ React.createElement(LLMService.Provider, { value: { project_id, customer } }, children);
|
|
34
|
+
return /* @__PURE__ */ React.createElement(LLMService.Provider, { value: { project_id, customer, url } }, children);
|
|
34
35
|
};
|
|
35
36
|
|
|
36
37
|
// src/useLLM.ts
|
|
@@ -53,11 +54,10 @@ var useLLM = (options) => {
|
|
|
53
54
|
};
|
|
54
55
|
function send(_0) {
|
|
55
56
|
return __async(this, arguments, function* (prompt, messages = [], stream = true, abortController = new AbortController(), service = null) {
|
|
56
|
-
var _a, _b, _c;
|
|
57
|
+
var _a, _b, _c, _d;
|
|
57
58
|
setResponse("");
|
|
58
59
|
setIdle(false);
|
|
59
60
|
let errorInFetch = "";
|
|
60
|
-
const url = "https://chat.llmasaservice.io/";
|
|
61
61
|
const responseBody = JSON.stringify({
|
|
62
62
|
projectId: (_a = context == null ? void 0 : context.project_id) != null ? _a : "",
|
|
63
63
|
serviceId: service,
|
|
@@ -78,11 +78,12 @@ var useLLM = (options) => {
|
|
|
78
78
|
body: responseBody
|
|
79
79
|
};
|
|
80
80
|
try {
|
|
81
|
+
const url = (_c = context == null ? void 0 : context.url) != null ? _c : "https://chat.llmasaservice.io/";
|
|
81
82
|
const response2 = yield fetch(url, options2);
|
|
82
83
|
if (!response2.ok) {
|
|
83
84
|
errorInFetch = `Error: Network error for service. (${response2.status} ${response2.statusText})`;
|
|
84
85
|
} else {
|
|
85
|
-
const reader = (_c = response2 == null ? void 0 : response2.body) == null ? void 0 : _c.getReader();
|
|
86
|
+
const reader = (_d = response2 == null ? void 0 : response2.body) == null ? void 0 : _d.getReader();
|
|
86
87
|
const decoder = new TextDecoder("utf-8");
|
|
87
88
|
setIdle(false);
|
|
88
89
|
if (!stream) {
|
package/package.json
CHANGED
package/src/LLMAsAService.tsx
CHANGED
|
@@ -10,6 +10,7 @@ export type LLMAsAServiceCustomer = {
|
|
|
10
10
|
export interface LLMServiceType {
|
|
11
11
|
project_id: string | undefined;
|
|
12
12
|
customer?: LLMAsAServiceCustomer;
|
|
13
|
+
url?: string;
|
|
13
14
|
}
|
|
14
15
|
|
|
15
16
|
export const LLMService = createContext<LLMServiceType | undefined>(undefined);
|
|
@@ -18,15 +19,17 @@ interface UserProviderProps {
|
|
|
18
19
|
children: ReactNode;
|
|
19
20
|
project_id: string | undefined;
|
|
20
21
|
customer?: LLMAsAServiceCustomer;
|
|
22
|
+
url?: string;
|
|
21
23
|
}
|
|
22
24
|
|
|
23
25
|
export const LLMServiceProvider: React.FC<UserProviderProps> = ({
|
|
24
26
|
children,
|
|
25
27
|
project_id,
|
|
26
28
|
customer,
|
|
29
|
+
url = "https://chat.llmasaservice.io/"
|
|
27
30
|
}) => {
|
|
28
31
|
return (
|
|
29
|
-
<LLMService.Provider value={{ project_id, customer }}>
|
|
32
|
+
<LLMService.Provider value={{ project_id, customer, url }}>
|
|
30
33
|
{children}
|
|
31
34
|
</LLMService.Provider>
|
|
32
35
|
);
|
package/src/useLLM.ts
CHANGED
|
@@ -57,7 +57,6 @@ export const useLLM = (options?: LLMServiceType): UseLLMReturnType => {
|
|
|
57
57
|
setIdle(false);
|
|
58
58
|
|
|
59
59
|
let errorInFetch = "";
|
|
60
|
-
const url = "https://chat.llmasaservice.io/";
|
|
61
60
|
|
|
62
61
|
const responseBody = JSON.stringify({
|
|
63
62
|
projectId: context?.project_id ?? "",
|
|
@@ -81,6 +80,7 @@ export const useLLM = (options?: LLMServiceType): UseLLMReturnType => {
|
|
|
81
80
|
};
|
|
82
81
|
|
|
83
82
|
try {
|
|
83
|
+
const url = context?.url ?? "https://chat.llmasaservice.io/";
|
|
84
84
|
const response = await fetch(url, options);
|
|
85
85
|
if (!response.ok) {
|
|
86
86
|
errorInFetch = `Error: Network error for service. (${response.status} ${response.statusText})`;
|