@graf-research/llm-runner 0.0.14 → 0.0.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
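In short, 0.0.15 makes two changes: it adds a GeminiLLM runner alongside the existing ChatGPT, Ollama, and Anthropic runners, and it widens the message parameter of `stream`, `streamNoContext`, `ask`, and `askNoContext` from `string[]` to `string[] | string`. A minimal sketch of the new call pattern, based on the README and typing changes shown below (the API key and model name are placeholders):

```ts
import { ChatGPTLLM, LLMRunner } from "@graf-research/llm-runner";

const llm: LLMRunner.BaseLLM = new ChatGPTLLM('<apikey>', 'gpt-4o-mini');

// new in 0.0.15: a single prompt no longer needs to be wrapped in an array
const single: string = await llm.askNoContext('Apa ibukota Indonesia?');

// arrays of prompts keep working as before
const multi: string = await llm.askNoContext([
  'Saya sedang berada di Indonesia',
  'apa ibukota negara tersebut?'
]);
```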
package/README.md CHANGED
@@ -8,6 +8,20 @@ Sebuah alternatif untuk mengutilisasi LLM ke programming NodeJS/Javascript. Dide
  npm install --save @graf-research/llm-runner
  ```

+ ## Supported LLM
+
+ ```ts
+ import { ChatGPTLLM, OllamaLLM, AnthropicLLM, GeminiLLM, LLMRunner } from "@graf-research/llm-runner";
+
+ const chatgpt: LLMRunner.BaseLLM = new ChatGPTLLM('apikey', '<chatgpt model>');
+ const ollama: LLMRunner.BaseLLM = new OllamaLLM('http://my-ollama-server', '<ollama model>');
+ const anthropic: LLMRunner.BaseLLM = new AnthropicLLM('apikey', '<anthropic model>');
+ const gemini: LLMRunner.BaseLLM = new GeminiLLM('apikey', '<gemini model>');
+
+ // different platform implementations, but the same BaseLLM class signature
+ const llm: LLMRunner.BaseLLM = ollama;
+ ```
+
  ## Example

  #### Simple
@@ -17,7 +31,15 @@ import { ChatGPTLLM } from "@graf-research/llm-runner";

  const chat_gpt_api_key = '<apikey>';
  const chatgpt = new ChatGPTLLM(chat_gpt_api_key, 'gpt-4o-mini');
- const response: string = await chatgpt.askNoContext(['Apa ibukota Indonesia?']);
+
+ // pass a single string
+ const response: string = await chatgpt.askNoContext('Apa ibukota Indonesia?');
+
+ // or pass an array of strings
+ const multi_response: string = await chatgpt.askNoContext([
+   'Saya sedang berada di Indonesia',
+   'apa ibukota negara tersebut?'
+ ]);
  ```

  #### Simple w/ Context
@@ -66,6 +88,7 @@ import { ChatGPTLLM, GenericLLM } from "@graf-research/llm-runner";
  const chat_gpt_api_key = '<apikey>';
  const chatgpt = new ChatGPTLLM(chat_gpt_api_key, 'gpt-4o-mini');

+ // can pass a string or an array of strings
  const response: GenericLLM.StreamResponse = await chatgpt.streamNoContext(['Jelaskan proses metamorfosis pada kupu-kupu']);
  response.stream((chunk: string, is_complete: boolean) => {
    if (!is_complete) {
@@ -134,8 +157,9 @@ import { ChatGPTLLM, OllamaLLM, AnthropicLLM, LLMRunner } from "@graf-research/l
  const chatgpt: LLMRunner.BaseLLM = new ChatGPTLLM('apikey', 'gpt-4o-mini');
  const ollama: LLMRunner.BaseLLM = new OllamaLLM('http://my-ollama-server', 'deepseek-r1:8b');
  const anthropic: LLMRunner.BaseLLM = new AnthropicLLM('apikey', 'claude-3-opus-latest');
+ const gemini: LLMRunner.BaseLLM = new GeminiLLM('apikey', 'gemini-1.5-flash');

- // different platform implemented on but same signature BaseLLM class
+ // different platform implementations, but the same BaseLLM class signature
  const llm: LLMRunner.BaseLLM = ollama;
  ```

@@ -149,7 +173,15 @@ const llm: LLMRunner.BaseLLM = ollama;
  import { ChatGPTLLM, LLMRunner } from "@graf-research/llm-runner";

  const llm: LLMRunner.BaseLLM = new ChatGPTLLM('<apikey>', 'gpt-4o-mini');
- const response: string = await llm.askNoContext(['Apa ibukota Indonesia?']);
+
+ // pass a single string
+ const response: string = await llm.askNoContext('Apa ibukota Indonesia?');
+
+ // or pass an array of strings
+ const multi_response: string = await llm.askNoContext([
+   'Saya sedang berada di Indonesia',
+   'apa ibukota negara tersebut?'
+ ]);
  ```

  *With Stream*
@@ -159,6 +191,7 @@ import { ChatGPTLLM, GenericLLM, LLMRunner } from "@graf-research/llm-runner";

  const llm: LLMRunner.BaseLLM = new ChatGPTLLM('<apikey>', 'gpt-4o-mini');

+ // can pass a string or an array of strings
  const response: GenericLLM.StreamResponse = await llm.streamNoContext(['Jelaskan proses metamorfosis pada kupu-kupu']);
  response.stream((chunk: string, is_complete: boolean) => {
  ...
@@ -23,9 +23,9 @@ export declare namespace LLMRunner {
   * Abstract Base LLM Class
   */
  abstract class BaseLLM extends GenericLLM.BaseLLM<ChatSession, Message> {
-     stream(messages: string[], id_session: string): Promise<GenericLLM.StreamResponse>;
-     streamNoContext(messages: string[]): Promise<GenericLLM.StreamResponse>;
-     ask(messages: string[], id_session: string): Promise<string>;
-     askNoContext(messages: string[]): Promise<string>;
+     stream(message_data: string[] | string, id_session: string): Promise<GenericLLM.StreamResponse>;
+     streamNoContext(message_data: string[] | string): Promise<GenericLLM.StreamResponse>;
+     ask(message_data: string[] | string, id_session: string): Promise<string>;
+     askNoContext(message_data: string[] | string): Promise<string>;
  }
  }
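The widened signatures are a strict superset of the old ones, so existing call sites that pass `string[]` continue to type-check; a quick standalone illustration (the `MessageData` alias is not part of the package):

```ts
type MessageData = string[] | string;

// both shapes are assignable, so pre-0.0.15 call sites keep compiling
const as_array: MessageData = ['halo', 'dunia'];
const as_string: MessageData = 'halo';
```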
@@ -74,8 +74,9 @@ var LLMRunner;
   * Abstract Base LLM Class
   */
  class BaseLLM extends generic_llm_1.GenericLLM.BaseLLM {
-     stream(messages, id_session) {
+     stream(message_data, id_session) {
        return __awaiter(this, void 0, void 0, function* () {
+         const messages = Array.isArray(message_data) ? message_data : [message_data];
          const ac = new AbortController();
          const stream = new node_stream_1.Readable({ objectMode: true, read() { } });
          this.streamChat(messages, id_session, stream, ac);
@@ -109,8 +110,9 @@ var LLMRunner;
        };
      });
    }
-   streamNoContext(messages) {
+   streamNoContext(message_data) {
      return __awaiter(this, void 0, void 0, function* () {
+       const messages = Array.isArray(message_data) ? message_data : [message_data];
        const ac = new AbortController();
        const stream = new node_stream_1.Readable({ objectMode: true, read() { } });
        this.streamChat(messages, null, stream, ac);
@@ -142,16 +144,18 @@ var LLMRunner;
        };
      });
    }
-   ask(messages, id_session) {
+   ask(message_data, id_session) {
      return __awaiter(this, void 0, void 0, function* () {
+       const messages = Array.isArray(message_data) ? message_data : [message_data];
        const res = yield this.chat(messages, id_session);
        yield this.chat_session_manager.saveMessage(messages, 'user', id_session);
        yield this.chat_session_manager.saveMessage([res], 'assistant', id_session);
        return res;
      });
    }
-   askNoContext(messages) {
+   askNoContext(message_data) {
      return __awaiter(this, void 0, void 0, function* () {
+       const messages = Array.isArray(message_data) ? message_data : [message_data];
        return yield this.chat(messages, null);
      });
    }
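Each of the four methods normalizes the widened parameter up front with the same `Array.isArray` guard, so the rest of the implementation keeps operating on `string[]`. A standalone sketch of that pattern (the `toMessages` helper name is illustrative, not part of the package):

```ts
// Mirrors the guard added in 0.0.15: accept string | string[],
// always hand a string[] onward to the underlying chat call.
function toMessages(message_data: string[] | string): string[] {
  return Array.isArray(message_data) ? message_data : [message_data];
}

toMessages('halo');     // ['halo']
toMessages(['a', 'b']); // ['a', 'b']
```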
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@graf-research/llm-runner",
-   "version": "0.0.14",
+   "version": "0.0.15",
    "main": "dist/index.js",
    "scripts": {
      "build": "rm -rf dist && tsc",