@graf-research/llm-runner 0.0.11 → 0.0.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/README.md +9 -9
  2. package/package.json +1 -1
package/README.md CHANGED
@@ -27,12 +27,12 @@ import { ChatGPTLLM } from "@graf-research/llm-runner";
 
 const chat_gpt_api_key = '<apikey>';
 const chatgpt = new ChatGPTLLM(chat_gpt_api_key);
-const session_id: string = 'sample-id';
+const session = await chatgpt.chat_session_manager.newSession();
 
-const response1: string = await chatgpt.ask(['Apa ibukota Indonesia?'], session_id);
+const response1: string = await chatgpt.ask(['Apa ibukota Indonesia?'], session.id);
 console.log(response1);
 
-const response2: string = await chatgpt.ask(['Apa yang saya tanyakan sebelumnya?'], session_id);
+const response2: string = await chatgpt.ask(['Apa yang saya tanyakan sebelumnya?'], session.id);
 console.log(response2);
 ```
 
@@ -172,9 +172,9 @@ response.stream((chunk: string, is_complete: boolean) => {
 import { ChatGPTLLM, LLMRunner } from "@graf-research/llm-runner";
 
 const llm: LLMRunner.BaseLLM = new ChatGPTLLM('<apikey>');
-const session_id = '1';
-const response1: string = await llm.ask(['Apa ibukota Indonesia?'], session_id);
-const response2: string = await llm.ask(['Apa yang saya tanyakan sebelumnya?'], session_id);
+const session = await llm.chat_session_manager.newSession();
+const response1: string = await llm.ask(['Apa ibukota Indonesia?'], session.id);
+const response2: string = await llm.ask(['Apa yang saya tanyakan sebelumnya?'], session.id);
 // response2 will remember conversation history/context
 ```
 
@@ -184,12 +184,12 @@ const response2: string = await llm.ask(['Apa yang saya tanyakan sebelumnya?'],
 import { ChatGPTLLM, LLMRunner } from "@graf-research/llm-runner";
 
 const llm: LLMRunner.BaseLLM = new ChatGPTLLM('<apikey>');
-const session_id: string = 'sample-id';
+const session = await llm.chat_session_manager.newSession();
 
-const response1: GenericLLM.StreamResponse = await chatgpt.stream(['Jelaskan proses metamorfosis pada kupu-kupu'], session_id);
+const response1: GenericLLM.StreamResponse = await chatgpt.stream(['Jelaskan proses metamorfosis pada kupu-kupu'], session.id);
 response1.stream(async (chunk1: string, is_complete1: boolean) => {
   if (is_complete1) {
-    const response2: GenericLLM.StreamResponse = await chatgpt.stream(['Apa yang saya tanyakan sebelumnya?'], session_id);
+    const response2: GenericLLM.StreamResponse = await chatgpt.stream(['Apa yang saya tanyakan sebelumnya?'], session.id);
     response2.stream(async (chunk2: string, is_complete2: boolean) => {
       ...
     });
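
Taken together, the README changes swap hand-written string session IDs for sessions issued by the runner's `chat_session_manager`. Below is a minimal sketch of the post-0.0.12 ask-based flow, wrapped in an async function (an assumption; the README snippets use top-level await), with the constructor, `newSession()`, and `ask()` usage taken from the diff above:

```ts
import { ChatGPTLLM, LLMRunner } from "@graf-research/llm-runner";

async function main(): Promise<void> {
  // Placeholder key, exactly as in the README examples.
  const llm: LLMRunner.BaseLLM = new ChatGPTLLM('<apikey>');

  // New in 0.0.12: sessions come from the chat session manager
  // instead of an arbitrary caller-chosen string ID.
  const session = await llm.chat_session_manager.newSession();

  // Both calls share session.id, so the second answer can draw on the
  // first exchange ("What is the capital of Indonesia?" /
  // "What did I ask before?").
  const response1: string = await llm.ask(['Apa ibukota Indonesia?'], session.id);
  console.log(response1);

  const response2: string = await llm.ask(['Apa yang saya tanyakan sebelumnya?'], session.id);
  console.log(response2);
}

main().catch(console.error);
```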
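
The streaming example changes the same way. Here is a sketch of the updated flow, using a single `chatgpt` variable throughout (the README snippet declares `llm` but calls `chatgpt`) and with hypothetical chunk handling standing in for the `...` the README leaves elided; whether each callback receives an incremental chunk or the accumulated text is not shown in the diff, so the printing below is an assumption:

```ts
import { ChatGPTLLM } from "@graf-research/llm-runner";

async function main(): Promise<void> {
  const chatgpt = new ChatGPTLLM('<apikey>');

  // 0.0.12: create a session and pass its id to stream(), as in the diff.
  const session = await chatgpt.chat_session_manager.newSession();

  const response1 = await chatgpt.stream(['Jelaskan proses metamorfosis pada kupu-kupu'], session.id);
  response1.stream(async (chunk1: string, is_complete1: boolean) => {
    process.stdout.write(chunk1); // hypothetical handling of each chunk

    if (is_complete1) {
      // Follow-up in the same session; it can reference the earlier question.
      const response2 = await chatgpt.stream(['Apa yang saya tanyakan sebelumnya?'], session.id);
      response2.stream(async (chunk2: string, is_complete2: boolean) => {
        process.stdout.write(chunk2); // hypothetical handling
        if (is_complete2) {
          console.log('\ndone');
        }
      });
    }
  });
}

main().catch(console.error);
```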
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@graf-research/llm-runner",
-  "version": "0.0.11",
+  "version": "0.0.12",
   "main": "dist/index.js",
   "scripts": {
     "build": "rm -rf dist && tsc",