openkbs 0.0.19 → 0.0.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/Docs.md CHANGED
@@ -9,9 +9,11 @@ src/
9
9
  │ ├── onRequest.js // Handles incoming user messages
10
10
  │ ├── onResponse.js // Handles outgoing LLM messages
11
11
  │ ├── onPublicAPIRequest.js // Handles public API requests
12
+ │ ├── onAddMessages.js // Handles messages added to the chat (NEW)
12
13
  │ ├── onRequest.json // Dependencies for onRequest handler
13
14
  │ ├── onResponse.json // Dependencies for onResponse handler
14
- └── onPublicAPIRequest.json // Dependencies for onPublicAPIRequest handler
15
+ ├── onPublicAPIRequest.json // Dependencies for onPublicAPIRequest handler
16
+ │ └── onAddMessages.json // Dependencies for onAddMessages handler (NEW)
15
17
  │── Frontend/
16
18
  │ ├── contentRender.js // Custom rendering logic for chat messages
17
19
  │ └── contentRender.json // Dependencies for the contentRender module
@@ -75,7 +77,7 @@ export const handler = async (event) => {
75
77
  const actions = getActions({
76
78
  _meta_actions: event?.payload?.messages?.length > maxSelfInvokeMessagesCount
77
79
  ? ["REQUEST_CHAT_MODEL_EXCEEDED"]
78
- : []
80
+ : ["REQUEST_CHAT_MODEL"]
79
81
  });
80
82
 
81
83
  for (let [regex, action] of actions) {
@@ -220,7 +222,6 @@ module.exports = {
220
222
  if (!kbId) return { error: "kbId is not provided" }
221
223
 
222
224
  try {
223
- // Encrypt specific fields if needed
224
225
  const myItem = {};
225
226
  for (const attribute of attributes) {
226
227
  const { attrName, encrypted } = attribute;
@@ -248,52 +249,98 @@ module.exports = {
248
249
 
249
250
  ```javascript
250
251
  // Example creating a "feedback" item
251
- const kbId = 'your-kb-id'; // Replace with your actual kbId
252
- const data = {
253
- action: "createItem",
254
- kbId: kbId, // knowledge base ID
255
- itemType: "feedback",
256
- attributes: [
257
- { attrType: "keyword1", attrName: "name", encrypted: true },
258
- { attrType: "text1", attrName: "feedbackText", encrypted: false }
259
- ],
260
- item: {
261
- name: "John Doe",
262
- feedbackText: "Great product!"
263
- }
264
- };
252
+ const createFeedback = async (kbId, name, text) => (
253
+ await fetch('https://chat.openkbs.com/publicAPIRequest', {
254
+ method: 'POST',
255
+ headers: { 'Content-Type': 'application/json' },
256
+ body: JSON.stringify({
257
+ action: "createItem",
258
+ kbId,
259
+ itemType: "feedback",
260
+ attributes: [
261
+ { attrType: "keyword1", attrName: "name", encrypted: true },
262
+ { attrType: "text1", attrName: "feedbackText", encrypted: false }
263
+ ],
264
+ item: { name, feedbackText: text }
265
+ })
266
+ })
267
+ ).json();
268
+ ```
265
269
 
266
- fetch('https://chat.openkbs.com/publicAPIRequest', { // Call the public API endpoint
267
- method: 'POST',
268
- headers: { 'Content-Type': 'application/json' },
269
- body: JSON.stringify(data)
270
- })
271
- .then(response => response.json())
272
- .then(data => {
273
- if (data.error) {
274
- console.error("Error creating item:", data.error);
275
- // Handle error (e.g., display error message)
276
- } else {
277
- console.log("Item created successfully:", data);
278
- // Handle success (e.g., update UI)
279
- }
280
- })
281
- .catch(error => {
282
- console.error("Network error:", error);
283
- // Handle network errors
284
- });
270
+ By utilizing `onPublicAPIRequest` and `openkbs.items`, you can build powerful integrations that allow external systems to store and manage data within your OpenKBS application without compromising security. This approach is especially valuable for scenarios like form submissions, webhooks, or any situation where direct, unauthenticated access to data storage is required. Remember to carefully consider security implications and implement necessary precautions.
271
+
272
+ ### `onAddMessages` Event Handler:
285
273
 
274
+ The `onAddMessages` handler allows you to intercept and process messages *as they are added to the chat*. This handler is triggered *after* the `onRequest` handler but *before* the message is sent to the LLM. It's particularly useful for scenarios where a third-party system or service sends messages directly to your OpenKBS application to perform an action.
286
275
 
276
+ **Example: User moderation:**
277
+
278
+ **1. Third-Party Service API request:**
279
+
280
+ ```javascript
281
+ // Example of a third-party system sending a chat message to OpenKBS
282
+ axios.post('https://chat.openkbs.com/', {
283
+ action: "chatAddMessages",
284
+ chatId: 'NSFW_CHAT_ID', // the chat id created to log and process NSFW message
285
+ messages: [{
286
+ role: "system",
287
+ content: JSON.stringify({
288
+ labels: ['adult', 'explicit'],
289
+ fileName: 'image.jpg',
290
+ path: '/uploads/image.jpg'
291
+ }),
292
+ msgId: `${Date.now()}-000000`
293
+ }],
294
+ apiKey: "YOUR_API_KEY",
295
+ kbId: "YOUR_KB_ID"
296
+ }, {
297
+ headers: { 'Content-Type': 'application/json' }
298
+ });
287
299
  ```
288
300
 
289
- By utilizing `onPublicAPIRequest` and `openkbs.items`, you can build powerful integrations that allow external systems to store and manage data within your OpenKBS application without compromising security. This approach is especially valuable for scenarios like form submissions, webhooks, or any situation where direct, unauthenticated access to data storage is required. Remember to carefully consider security implications and implement necessary precautions.
301
+ **2. `onAddMessages` Handler:**
302
+
303
+ ```javascript
304
+ // src/Events/onAddMessages.js
305
+ import * as actions from './actions.js';
306
+
307
+ export const handler = async (event) => {
308
+ const { messages, chatId } = event.payload;
309
+ let msgData;
310
+
311
+ // NSFW Chat Handler
312
+ if (chatId === 'NSFW_CHAT_ID') { // Check if the message is for the NSFW chat
313
+ try {
314
+ msgData = JSON.parse(messages[0].content); // Parse the message content (expecting JSON)
315
+ const { data } = await actions.getUser([null, msgData.kbId]); // Get user information
316
+ await actions.warnAccount([null, data.user.accountId, msgData?.labels]); // Issue a warning
317
+ await actions.deleteFile([null, msgData.path]); // Delete the offending file
318
+
319
+ // Return a system message confirming the action
320
+ return [
321
+ ...messages,
322
+ {
323
+ role: 'system',
324
+ msgId: Date.now() + '000000',
325
+ content: `### 👮‍♀️ System Actions:\nWarning issued and content removed`
326
+ }
327
+ ];
328
+ } catch (e) {
329
+ console.error("Error processing NSFW content:", e);
330
+ }
331
+ }
332
+
333
+ return messages; // Return messages unchanged if no action is taken
334
+ };
335
+
336
+ ```
290
337
 
291
- **Dependencies (onRequest.json, onResponse.json, onPublicAPIRequest.json):**
338
+ **Dependencies (onRequest.json, onResponse.json, etc.):**
292
339
 
293
340
  These files specify the NPM package dependencies required for the respective event handlers. They follow the standard `package.json` format.
294
341
 
295
342
  ```json
296
- // src/Events/onRequest.json, src/Events/onResponse.json, src/Events/onPublicAPIRequest.json
343
+ // src/Events/*.json
297
344
  {
298
345
  "dependencies": {
299
346
  "your-package": "^1.0.0"
@@ -346,7 +393,7 @@ The `openkbs` object provides a set of utility functions and services to interac
346
393
 
347
394
  * **`openkbs.detectLanguage(text, params)`:** Detects the language of the provided text.
348
395
 
349
- * **`openkbs.textToSpeech(text, params)`:** Converts text to speech.
396
+ * **`openkbs.textToSpeech(text, params)`:** Converts text to speech. Returns `response.audioContent` which automatically plays in the chat interface.
350
397
 
351
398
  * **`openkbs.encrypt(plaintext)`:** Encrypts data using the provided AES key.
352
399
 
@@ -550,7 +597,7 @@ The dependencies marked as `(fixed)` are not installed as additional dependencie
550
597
 
551
598
  These components and utilities are accessible directly within your `onRenderChatMessage` function, streamlining your custom development process.
552
599
 
553
- ### `msgIndex`
600
+ ### msgIndex
554
601
  ```javascript
555
602
  const onRenderChatMessage = async (params) => {
556
603
  const { msgIndex, messages } = params;
@@ -560,7 +607,7 @@ const onRenderChatMessage = async (params) => {
560
607
  };
561
608
  ```
562
609
 
563
- ### `messages`
610
+ ### messages
564
611
  ```javascript
565
612
  const onRenderChatMessage = async (params) => {
566
613
  const { messages } = params;
@@ -571,7 +618,7 @@ const onRenderChatMessage = async (params) => {
571
618
  };
572
619
  ```
573
620
 
574
- ### `setMessages`
621
+ ### setMessages
575
622
  ```javascript
576
623
  const onRenderChatMessage = async (params) => {
577
624
  const { setMessages, messages } = params;
@@ -580,7 +627,7 @@ const onRenderChatMessage = async (params) => {
580
627
  };
581
628
  ```
582
629
 
583
- ### `KB`
630
+ ### KB
584
631
  ```javascript
585
632
  const onRenderChatMessage = async (params) => {
586
633
  const { KB } = params;
@@ -589,7 +636,7 @@ const onRenderChatMessage = async (params) => {
589
636
  };
590
637
  ```
591
638
 
592
- ### `chatContainerRef`
639
+ ### chatContainerRef
593
640
  ```javascript
594
641
  const onRenderChatMessage = async (params) => {
595
642
  const { chatContainerRef } = params;
@@ -599,7 +646,7 @@ const onRenderChatMessage = async (params) => {
599
646
  };
600
647
  ```
601
648
 
602
- ### `RequestChatAPI`
649
+ ### RequestChatAPI
603
650
  ```javascript
604
651
  const onRenderChatMessage = async (params) => {
605
652
  const { RequestChatAPI, messages } = params;
@@ -608,7 +655,7 @@ const onRenderChatMessage = async (params) => {
608
655
  };
609
656
  ```
610
657
 
611
- ### `setSystemAlert`
658
+ ### setSystemAlert
612
659
  ```javascript
613
660
  const onRenderChatMessage = async (params) => {
614
661
  const { setSystemAlert } = params;
@@ -616,7 +663,7 @@ const onRenderChatMessage = async (params) => {
616
663
  };
617
664
  ```
618
665
 
619
- ### `setBlockingLoading`
666
+ ### setBlockingLoading
620
667
  ```javascript
621
668
  const onRenderChatMessage = async (params) => {
622
669
  const { setBlockingLoading } = params;
@@ -626,7 +673,7 @@ const onRenderChatMessage = async (params) => {
626
673
  };
627
674
  ```
628
675
 
629
- ### `blockingLoading`
676
+ ### blockingLoading
630
677
  ```javascript
631
678
  const onRenderChatMessage = async (params) => {
632
679
  const { blockingLoading } = params;
@@ -636,7 +683,7 @@ const onRenderChatMessage = async (params) => {
636
683
  };
637
684
  ```
638
685
 
639
- ### `sendButtonRef`
686
+ ### sendButtonRef
640
687
  ```javascript
641
688
  const onRenderChatMessage = async (params) => {
642
689
  const { sendButtonRef } = params;
@@ -646,7 +693,7 @@ const onRenderChatMessage = async (params) => {
646
693
  };
647
694
  ```
648
695
 
649
- ### `sendButtonRippleRef`
696
+ ### sendButtonRippleRef
650
697
  ```javascript
651
698
  const onRenderChatMessage = async (params) => {
652
699
  const { sendButtonRippleRef } = params;
@@ -656,7 +703,7 @@ const onRenderChatMessage = async (params) => {
656
703
  };
657
704
  ```
658
705
 
659
- ### `setInputValue`
706
+ ### setInputValue
660
707
  ```javascript
661
708
  const onRenderChatMessage = async (params) => {
662
709
  const { setInputValue } = params;
@@ -664,7 +711,7 @@ const onRenderChatMessage = async (params) => {
664
711
  };
665
712
  ```
666
713
 
667
- ### `renderSettings`
714
+ ### renderSettings
668
715
  ```javascript
669
716
  const onRenderChatMessage = async (params) => {
670
717
  const { renderSettings } = params;
@@ -672,7 +719,7 @@ const onRenderChatMessage = async (params) => {
672
719
  };
673
720
  ```
674
721
 
675
- ### `axios`
722
+ ### axios
676
723
  ```javascript
677
724
  const onRenderChatMessage = async (params) => {
678
725
  const { axios } = params;
@@ -681,7 +728,7 @@ const onRenderChatMessage = async (params) => {
681
728
  };
682
729
  ```
683
730
 
684
- ### `itemsAPI`
731
+ ### itemsAPI
685
732
  ```javascript
686
733
  const onRenderChatMessage = async (params) => {
687
734
  const { itemsAPI } = params;
@@ -690,7 +737,7 @@ const onRenderChatMessage = async (params) => {
690
737
  };
691
738
  ```
692
739
 
693
- ### `indexedDB`
740
+ ### indexedDB
694
741
  ```javascript
695
742
  const onRenderChatMessage = async (params) => {
696
743
  const { indexedDB } = params;
@@ -699,7 +746,7 @@ const onRenderChatMessage = async (params) => {
699
746
  };
700
747
  ```
701
748
 
702
- ### `generateMsgId`
749
+ ### generateMsgId
703
750
  ```javascript
704
751
  const onRenderChatMessage = async (params) => {
705
752
  const { generateMsgId } = params;
@@ -708,7 +755,7 @@ const onRenderChatMessage = async (params) => {
708
755
  };
709
756
  ```
710
757
 
711
- ### `kbUserData`
758
+ ### kbUserData
712
759
  ```javascript
713
760
  const onRenderChatMessage = async (params) => {
714
761
  const { kbUserData } = params;
package/INSTALL.md ADDED
@@ -0,0 +1,36 @@
1
+ - **Download CLI Binary**
2
+
3
+ - **Linux (x64):**
4
+
5
+ ```bash
6
+
7
+ wget -O ~/Downloads/openkbs https://downloads.openkbs.com/cli/linux/openkbs && chmod +x ~/Downloads/openkbs && sudo mv ~/Downloads/openkbs /usr/local/bin/openkbs
8
+
9
+ ```
10
+
11
+ - **Windows (x64):**
12
+
13
+ ```powershell
14
+
15
+ Invoke-WebRequest -Uri "https://downloads.openkbs.com/cli/windows/openkbs.exe" -OutFile "$Env:USERPROFILE\Downloads\openkbs.exe"
16
+
17
+ $Env:Path += ";$Env:USERPROFILE\Downloads"
18
+
19
+ ```
20
+
21
+
22
+ - **Mac (new M series):**
23
+
24
+ ```bash
25
+
26
+ curl -o ~/Downloads/openkbs https://downloads.openkbs.com/cli/macos/openkbs && mkdir -p /usr/local/bin && chmod +x ~/Downloads/openkbs && sudo mv ~/Downloads/openkbs /usr/local/bin/openkbs
27
+
28
+ ```
29
+
30
+ - **Mac (old models):**
31
+
32
+ ```bash
33
+
34
+ curl -o ~/Downloads/openkbs https://downloads.openkbs.com/cli/macos/openkbs-x64 && mkdir -p /usr/local/bin && chmod +x ~/Downloads/openkbs && sudo mv ~/Downloads/openkbs /usr/local/bin/openkbs
35
+
36
+ ```
package/ON_PREMISES.md ADDED
@@ -0,0 +1,215 @@
1
+
2
+ ### Running the Backend Locally (On-Premises)
3
+
4
+ To run the backend services of your AI application locally, follow these steps. This allows you to manage chat services, code execution, and AI LLM services on your own infrastructure.
5
+
6
+ #### Running the Chat Service Locally
7
+
8
+ 1. **Start the Chat Service**:
9
+ - Open a new terminal and navigate to the root folder of your application.
10
+ - Run the following command:
11
+
12
+ ```bash
13
+ npm run chat
14
+ ```
15
+
16
+ - If LocalStack is not installed, you will receive instructions on how to install it based on your platform.
17
+ - Open another terminal, navigate to `/tmp`, install LocalStack using the suggested commands, and then rerun `npm run chat`
18
+
19
+
20
+ 2. **Configure OpenAI Key**:
20
+ - Enter your `OPENAI_KEY` when prompted. This key will be stored at `~/.openkbs/.env`.
21
+
22
+ 3. **Access the Local Chat Service**:
24
+ - Refresh your browser at `http://{kbId}.apps.localhost:38593/chat`.
25
+ - You will see "On-Premises" in green text, indicating that your OpenKBS instance is using the local chat server to communicate with the OpenAI streaming API.
26
+ - You can remove the cloud models options by setting `"enableCloudModels": false` in `config.json`
27
+
28
+ #### Running the Code Execution Service Locally
29
+
30
+ 1. **Start the Code Execution Service**:
31
+ - Open another terminal tab, navigate to the root folder of your KB app, and run:
32
+
33
+ ```bash
34
+ npm run code
35
+ ```
36
+
37
+ 2. **Enter Secrets**:
38
+ - You may be prompted to enter any secret placeholders in your `./src/Events/actions.js`. By default, this includes `googlesearch_api_key` and `googlesearch_engine_id`.
39
+ - You can press enter to skip, but for using Google Search as an AI tool, it's recommended to fill them. Google provides 100 free searches per day.
40
+
41
+ Congratulations! The LLM can now execute Node.js code directly on your machine!
42
+
43
+ #### Enhancing Your Application with Code Execution
44
+
45
+ To utilize the code execution feature, follow these steps:
46
+
47
+ 1. **Update `contentRender.js`**:
48
+ - Modify your local `contentRender.js` file to match the version found at [contentRender.js](./examples/cloud-master/contentRender.js). This update will provide the necessary UI components for local code execution and response rendering.
49
+
50
+ 2. **Update `instructions.txt`**:
51
+ - Edit your local `instructions.txt` file to include the instructions found at [instructions.txt](./examples/cloud-master/instructions.txt). These instructions will guide the LLM on how to output code and other API commands for execution by the OpenKBS framework.
52
+
53
+ 3. **Push the new instructions**:
54
+ - We have to push the instructions, which are stored encrypted in the OpenKBS registry:
55
+ ```bash
56
+ openkbs push origin app/instructions.txt
57
+ ```
58
+ - push to localstack to build and deploy all Node.js events - ./src/Events
59
+ ```bash
60
+ openkbs push localstack
61
+ ```
62
+ 4. **Requesting the AI to Perform Tasks on Your PC and AWS Cloud**:
63
+ - Instruct the AI to list your desktop files, review the code, click `execute`, and click `send`:
64
+ ```
65
+ List my desktop files
66
+ ```
67
+ - Instruct the AI to create an S3 bucket and backup your desktop images to it:
68
+ ```
69
+ Create an S3 bucket and back up my desktop images to it
70
+ ```
71
+ ![backup.png](examples%2Fcloud-master%2Fbackup.png)
72
+ ---
73
+
74
+ ## Installing openkbs-ai-server and Integrating Llama 3.1 and Stable Diffusion 3 Locally
75
+
76
+ ![llama-loaded.png](examples%2Fcloud-master%2Fllama-loaded.png)
77
+
78
+ To set up the `openkbs-ai-server` and integrate advanced AI models like Llama 3.1 and Stable Diffusion 3 on your local machine, follow the steps outlined below.
79
+
80
+ ### Prerequisites
81
+
82
+ Ensure you have the following prerequisites installed and configured:
83
+
84
+ - Ubuntu 22.04 or a compatible Linux distribution.
85
+ - Python 3.10 and virtual environment tools.
86
+ - Node.js and npm.
87
+ - NVIDIA or AMD GPU drivers, depending on your hardware.
88
+
89
+ Please follow the installation on [GitHub](https://github.com/open-kbs/openkbs-ai-server).
90
+
91
+ ### Step 1: Checkout, Build, and Run
92
+
93
+ Clone the `openkbs-ai-server` repository and set up the environment:
94
+
95
+ ```bash
96
+ git clone git@github.com:open-kbs/openkbs-ai-server.git
97
+ cd openkbs-ai-server/cluster
98
+ npm i
99
+ cd ..
100
+ python -m venv .env
101
+ source .env/bin/activate
102
+ ```
103
+
104
+ **IMPORTANT: SELECT THE CORRECT GPU INSTRUCTIONS BELOW. DO NOT EXECUTE BOTH.**
105
+
106
+ #### **FOR AMD GPUS:**
107
+
108
+ **ONLY FOLLOW THESE INSTRUCTIONS IF YOU HAVE AN AMD GPU.**
109
+
110
+ Install necessary libraries and Python packages:
111
+
112
+ ```bash
113
+ sudo apt-get install -y libjpeg-dev libpng-dev
114
+ pip install wheel setuptools
115
+ pip install --pre torch torchvision torchaudio --index-url https://download.pytorch.org/whl/nightly/rocm6.1/
116
+ pip install -r ./models/requirements_AMD.txt
117
+ ```
118
+
119
+ #### **FOR NVIDIA GPUS:**
120
+
121
+ **ONLY FOLLOW THESE INSTRUCTIONS IF YOU HAVE AN NVIDIA GPU.**
122
+
123
+ Install the required Python packages:
124
+
125
+ ```bash
126
+ pip install torch
127
+ pip install -r ./models/requirements_NVIDIA.txt
128
+ ```
129
+
130
+ ### Step 2: Configure Hugging Face Authentication
131
+
132
+ Log in to Hugging Face to access the AI models:
133
+
134
+ ```bash
135
+ huggingface-cli login
136
+ ```
137
+
138
+ Enter your Hugging Face token when prompted.
139
+
140
+ ### Step 3: Install Global Node.js Packages
141
+
142
+ Install global Node.js packages required for running the server:
143
+
144
+ ```bash
145
+ npm install -g pm2 nodemon react-scripts
146
+ ```
147
+
148
+ ### Step 4: Start the AI Server
149
+
150
+ Launch the AI server using the provided script:
151
+
152
+ ```bash
153
+ ./start.sh
154
+ ```
155
+
156
+ This command will start both the frontend and backend services using pm2. Your default web browser should automatically open to [http://localhost:7080/register](http://localhost:7080/register), where you can register the admin account for the AI server.
157
+
158
+ ### Step 5: Install AI Models
159
+
160
+ In the AI server admin panel, search for and install the following models:
161
+
162
+ - **Llama-3.1-8B-Instruct**: Ensure you have access to [Llama-3.1-8B-Instruct](https://huggingface.co/meta-llama/Llama-3.1-8B-Instruct) on Hugging Face.
163
+ - **Stable Diffusion 3**: Ensure you have access to [Stable Diffusion 3](https://huggingface.co/stabilityai/stable-diffusion-3-medium) on Hugging Face.
164
+
165
+ After installation, restart your chat server to apply the changes.
166
+
167
+ ### Step 6: Integrate Stable Diffusion under Events actions, so that Llama can call it
168
+
169
+ Go to your app root folder
170
+ ```bash
171
+ cd my-pc-agent
172
+ ```
173
+
174
+ Add to `./src/Events/actions.js`
175
+ ```javascript
176
+ [/\/?textToImage\("(.*)"\)/, async (match) => {
177
+ try {
178
+ const response = await axios.get(`http://localhost:8080/pipe/stabilityai--stable-diffusion-3-medium-diffusers--default?prompt=${encodeURIComponent(match[1])}`, {
179
+ responseType: 'arraybuffer'
180
+ });
181
+
182
+ const base64Data = Buffer.from(response.data, 'binary').toString('base64');
183
+ const contentType = response.headers['content-type'];
184
+ const imageSrc = `data:${contentType};base64,${base64Data}`;
185
+
186
+ return { type: 'SAVED_CHAT_IMAGE', imageSrc, ...meta };
187
+ } catch (error) {
188
+ console.error('Error fetching image:', error);
189
+ throw error; // or handle the error as needed
190
+ }
191
+ }]
192
+ ```
193
+
194
+ Push the changes:
195
+ ```bash
196
+ openkbs push localstack
197
+ ```
198
+ ### Step 7: Test Llama agent
199
+
200
+ Once the models are installed and the server is running, select `Llama-3.1-8B-Inst` in your Chat Models selection and type in the chat:
201
+
202
+ ```
203
+ Hey Llama, search Google for the latest AI news and wait, then generate news image. Finally, use a template function to create an HTML page hosted on the S3 bucket 'ai-news-openkbs'.
204
+ ```
205
+
206
+ ![ai1.png](examples%2Fcloud-master%2Fai1.png)
207
+
208
+ ![llama-loaded.png](examples%2Fcloud-master%2Fllama-loaded.png)
209
+
210
+ ![sd3-loaded.png](examples%2Fcloud-master%2Fsd3-loaded.png)
211
+
212
+ ![ai2.png](examples%2Fcloud-master%2Fai2.png)
213
+
214
+ ![ai3.png](examples%2Fcloud-master%2Fai3.png)
215
+ Have fun!
package/README.md CHANGED
@@ -1,115 +1,41 @@
1
- # OpenKBS · [![License: MIT](https://img.shields.io/badge/License-MIT-green.svg)](https://github.com/open-kbs/openkbs-chat/blob/main/LICENSE) [![npm version](https://img.shields.io/badge/npm-v0.0.10-orange.svg)](https://www.npmjs.com/package/openkbs)
1
+ # OpenKBS · [![License: MIT](https://img.shields.io/badge/License-MIT-green.svg)](https://github.com/open-kbs/openkbs-chat/blob/main/LICENSE) [![npm version](https://img.shields.io/badge/npm-v0.0.20-orange.svg)](https://www.npmjs.com/package/openkbs)
2
2
 
3
- OpenKBS is an open-source platform for building and deploying AI agents and applications. Our mission is to provide developers with a flexible and powerful framework that empowers them to create advanced AI agents with ease, using simple text prompts to specify requirements.
3
+ OpenKBS is an extendable open-source platform designed to build, deploy and integrate AI agents anywhere, from websites to IoT devices. Its event-driven architecture enables full customization of backend and frontend components, while the LLM abstraction layer allows seamless switching between language models.
4
4
 
5
- ### Last Updates
6
- - openkbs-ai-server: added support for stable-diffusion-3.5-large
5
+ ## Table of Contents
7
6
 
8
- ### Table of Contents
9
-
10
-
11
- - [Creating Your First AI Agent Manually](#creating-your-first-ai-agent-manually)
12
- - [Step 1: Install CLI](#step-1-install-cli)
13
- - [Step 2: Create new Application](#step-2-create-new-application)
14
- - [Step 3: Understand the Project Structure](#step-3-understand-the-project-structure)
15
- - [Step 4: Deploy Your Application](#step-4-deploy-your-application)
16
- - [Step 5: Enhance Your Application](#step-5-enhance-your-application)
17
- - [Step 6: Local Development](#step-6-local-development)
18
- - [Step 7: Use Built-in MUI Components](#step-7-use-built-in-mui-components)
19
- - [Step 8: Running the Backend Locally (On-Premises)](#step-8-running-the-backend-locally-on-premises)
20
- - [Installing openkbs-ai-server and Integrating Llama 3.1 and Stable Diffusion 3 Locally](#installing-openkbs-ai-server-and-integrating-llama-31-and-stable-diffusion-3-locally)
7
+ - [Install CLI](#install-cli)
8
+ - [Create App](#create-app)
9
+ - [Deploy](#deploy)
10
+ - [Extend Frontend](#extend-frontend)
11
+ - [Setup Local Development](#setup-local-development)
12
+ - [Use Built-in MUI Components](#use-built-in-mui-components)
13
+ - [AI-Powered Generation](#ai-powered-frontend-generation)
14
+ - [Extend Backend](#extend-backend)
15
+ - [Framework Documentation](#framework-documentation)
21
16
  - [License](#license)
22
17
  - [Contributing](#contributing)
23
18
  - [Contact](#contact)
24
19
 
25
- ## Installation
26
-
27
- This module needs to be installed globally, so use the `-g` flag when installing:
28
-
29
- ```bash
30
- npm install -g openkbs
31
- ```
32
- ![ai1.png](examples%2Fcloud-master%2Fai1.png)
33
-
34
- ## Key Features
35
-
36
- [//]: # (- **Generative AI First**: An intuitive development interface designed for human beings. Employs generative AI tools to streamline the development life cycle, enabling rapid requirements gathering, system design, and deployment.)
37
- - **Seamless LLM Integration**: LLM abstraction layer providing a unified interface for various LLM vendors, such as OpenAI, Anthropic, and open-source models like LLaMA and Mistral. This layer allows one-click switching between LLMs without modifying source code, enabling seamless testing across models.
38
- - **Extensive Tooling**: Utilize a broad range of AI tools and services to build robust, scalable AI agents. This includes code execution, database engines, web browsing, image generation, embedding models, speech synthesis, and recognition. These tools enable LLMs to operate autonomously, with more resources continually being added.
39
- - **Open Source**: Provides developers with the freedom to customize, modify, and distribute the software freely.
40
-
41
- ---
42
-
43
-
44
- ## Creating Your First AI Agent Manually
45
-
46
- Follow these steps to create and deploy your first OpenKBS app using React and Node.js,
47
-
48
- ### Step 1: Install CLI
20
+ ## Install CLI
49
21
 
50
22
  First, ensure you have the OpenKBS CLI installed globally:
51
23
 
52
- - **Option 1: using NPM**
53
24
  ```bash
54
25
  npm install -g openkbs
55
26
  ```
56
27
 
57
- - **Option 2: Download Binary**
58
-
59
- - **Linux (x64):**
60
- ```bash
61
- wget -O ~/Downloads/openkbs https://downloads.openkbs.com/cli/linux/openkbs && chmod +x ~/Downloads/openkbs && sudo mv ~/Downloads/openkbs /usr/local/bin/openkbs
62
- ```
63
- - **Windows (x64):**
64
- ```powershell
65
- Invoke-WebRequest -Uri "https://downloads.openkbs.com/cli/windows/openkbs.exe" -OutFile "$Env:USERPROFILE\Downloads\openkbs.exe"
66
- $Env:Path += ";$Env:USERPROFILE\Downloads"
67
- ```
68
-
69
- - **Mac (new M series):**
70
- ```bash
71
- curl -o ~/Downloads/openkbs https://downloads.openkbs.com/cli/macos/openkbs && mkdir -p /usr/local/bin && chmod +x ~/Downloads/openkbs && sudo mv ~/Downloads/openkbs /usr/local/bin/openkbs
72
- ```
73
- - **Mac (old models):**
74
- ```bash
75
- curl -o ~/Downloads/openkbs https://downloads.openkbs.com/cli/macos/openkbs-x64 && mkdir -p /usr/local/bin && chmod +x ~/Downloads/openkbs && sudo mv ~/Downloads/openkbs /usr/local/bin/openkbs
76
- ```
77
-
78
-
79
-
80
- ### Step 2: Create New Application
28
+ ## Create App
81
29
 
82
30
  Create a new application using the OpenKBS CLI:
83
31
 
84
32
  ```bash
85
- openkbs create my-pc-agent
86
- ```
87
-
88
- Navigate into your newly created application directory:
33
+ openkbs create my-agent
89
34
 
90
- ```bash
91
- cd my-pc-agent
35
+ cd my-agent
92
36
  ```
93
37
 
94
- ### Step 3: Understand the Project Structure
95
-
96
- Your application will have the following structure:
97
-
98
- - `./app/icon.png`: Application icon.
99
- - `./app/settings.json`: Application settings.
100
- - `./app/instructions.txt`: Agent instructions.
101
- - `./src/Events/actions.js`: Contains all backend actions (LLM commands).
102
- - `./src/Events/onRequest.js`: Event handler that executes a command on user input.
103
- - `./src/Events/onRequest.json`: Contains all npm package dependencies for onRequest module.
104
- - `./src/Events/onResponse.js`: Similar to `onRequest.js`, but executed against LLM output.
105
- - `./src/Events/onResponse.json`: Contains all npm package dependencies for onResponse module.
106
- - `./src/Frontend/contentRender.js`: Contains frontend components of your application.
107
- - `./src/Frontend/contentRender.json`: Contains all npm package dependencies for contentRender module.
108
- ### Step 4: Deploy Your Application
109
-
110
- You have two options for deployment: OpenKBS Cloud or LocalStack.
111
-
112
- #### Deploy to OpenKBS Cloud
38
+ ## Deploy
113
39
 
114
40
  1. Log in to OpenKBS:
115
41
 
@@ -117,7 +43,7 @@ You have two options for deployment: OpenKBS Cloud or LocalStack.
117
43
  openkbs login
118
44
  ```
119
45
 
120
- 2. Push your application to OpenKBS Cloud:
46
+ 2. Push your application to OpenKBS:
121
47
 
122
48
  ```bash
123
49
  openkbs push
@@ -127,9 +53,9 @@ You have two options for deployment: OpenKBS Cloud or LocalStack.
127
53
 
128
54
  3. Open the provided URL and interact with your application.
129
55
 
130
- ### Step 5: Enhance Your Application
56
+ ## Extend Frontend
131
57
 
132
- To improve your application's rendering, you can use libraries like `react-markdown` for example.
58
+ To improve your application's user interface, you can use libraries like `react-markdown` for example.
133
59
 
134
60
  1. Add `react-markdown` to your dependencies:
135
61
 
@@ -162,7 +88,7 @@ To improve your application's rendering, you can use libraries like `react-markd
162
88
  openkbs push
163
89
  ```
164
90
 
165
- ### Step 6: Local Development
91
+ ### Setup Local Development
166
92
 
167
93
  For faster frontend development, run the OpenKBS UI dev server locally:
168
94
 
@@ -173,7 +99,7 @@ For faster frontend development, run the OpenKBS UI dev server locally:
173
99
 
174
100
  This command opens a browser pointing to `localhost`, allowing automatic rebuilds of your frontend code locally.
175
101
 
176
- ### Step 7: Use Built-in MUI Components
102
+ ### Use Built-in MUI Components
177
103
 
178
104
  Enhance your UI with Material-UI components:
179
105
 
@@ -188,13 +114,13 @@ Enhance your UI with Material-UI components:
188
114
 
189
115
  ```js
190
116
  return (
191
- <AppBar position="absolute" style={{ zIndex: 2000, flexGrow: 1, textAlign: 'left' }}>
117
+ <AppBar position="absolute" style={{ zIndex: 1300, flexGrow: 1, textAlign: 'left' }}>
192
118
  <Toolbar>
193
119
  <IconButton edge="start" color="inherit" aria-label="menu" style={{ marginRight: '16px' }}>
194
120
  <MenuIcon />
195
121
  </IconButton>
196
122
  <Typography variant="h6" style={{ flexGrow: 1 }}>
197
- My PC Agent
123
+ My Agent
198
124
  </Typography>
199
125
  <IconButton edge="end" color="inherit" aria-label="account">
200
126
  <AccountIcon />
@@ -206,231 +132,12 @@ Enhance your UI with Material-UI components:
206
132
 
207
133
  3. Observe real-time rendering by refreshing your browser at http://{kbId}.apps.localhost:38593/
208
134
 
209
- 4. Push the changes to your remote KB:
135
+ 4. Push the changes to your remote app instance:
210
136
 
211
137
  ```bash
212
138
  openkbs push
213
139
  ```
214
140
 
215
- ---
216
-
217
- ### Step 8: Running the Backend Locally (On-Premises)
218
-
219
- To run the backend services of your AI application locally, follow these steps. This allows you to manage chat services, code execution, and AI LLM services on your own infrastructure.
220
-
221
- #### Running the Chat Service Locally
222
-
223
- 1. **Start the Chat Service**:
224
- - Open a new terminal and navigate to the root folder of your application.
225
- - Run the following command:
226
-
227
- ```bash
228
- npm run chat
229
- ```
230
-
231
- - If LocalStack is not installed, you will receive instructions on how to install it based on your platform.
232
- Open another terminal, navigate to `/tmp`, install LocalStack using the suggested commands, and then rerun `npm run chat`
233
-
234
-
235
- 2. **Configure OpenAI Key**:
236
- - Enter your `OPENAI_KEY` when prompted. This key will be stored at `~/.openkbs/.env`.
237
-
238
- 3. **Access the Local Chat Service**:
239
- - Refresh your browser at `http://{kbId}.apps.localhost:38593/chat`.
240
- - You will see "On-Premises" in green text, indicating that your OpenKBS instance is using the local chat server to communicate with the OpenAI streaming API.
241
- - You can remove the cloud models options by setting `"enableCloudModels": false` in `config.json`
242
-
243
- #### Running the Code Execution Service Locally
244
-
245
- 1. **Start the Code Execution Service**:
246
- - Open another terminal tab, navigate to the root folder of your KB app, and run:
247
-
248
- ```bash
249
- npm run code
250
- ```
251
-
252
- 2. **Enter Secrets**:
253
- - You may be prompted to enter any secret placeholders in your `./src/Events/actions.js`. By default, this includes `googlesearch_api_key` and `googlesearch_engine_id`.
254
- - You can press enter to skip, but for using Google Search as an AI tool, it's recommended to fill them. Google provides 100 free searches per day.
255
-
256
- Congratulations! The LLM can now execute NodeJS code directly on your machine!
257
-
258
- #### Enhancing Your Application with Code Execution
259
-
260
- To utilize the code execution feature, follow these steps:
261
-
262
- 1. **Update `contentRender.js`**:
263
- - Modify your local `contentRender.js` file to match the version found at [contentRender.js](./examples/cloud-master/contentRender.js). This update will provide the necessary UI components for local code execution and response rendering.
264
-
265
- 2. **Update `instructions.txt`**:
266
- - Edit your local `instructions.txt` file to include the instructions found at [instructions.txt](./examples/cloud-master/instructions.txt). These instructions will guide the LLM on how to output code and other API commands for execution by the OpenKBS framework.
267
-
268
- 3. **Push the new instructions**:
269
- - We have to push the instructions, which are stored encrypted in the OpenKBS registry:
270
- ```bash
271
- openkbs push origin app/instructions.txt
272
- ```
273
- - Push to LocalStack to build and deploy all Node.js events (`./src/Events`):
274
- ```bash
275
- openkbs push localstack
276
- ```
277
- 4. **Requesting the AI to Perform Tasks on Your PC and AWS Cloud**:
278
- - Instruct the AI to list your desktop files, review the code, click `execute`, and click `send`:
279
- ```
280
- List my desktop files
281
- ```
282
- - Instruct the AI to create an S3 bucket and backup your desktop images to it:
283
- ```
284
- Create an S3 bucket and back up my desktop images to it
285
- ```
286
- ![backup.png](examples%2Fcloud-master%2Fbackup.png)
287
- ---
288
-
289
- ## Installing openkbs-ai-server and Integrating Llama 3.1 and Stable Diffusion 3 Locally
290
-
291
- ![llama-loaded.png](examples%2Fcloud-master%2Fllama-loaded.png)
292
-
293
- To set up the `openkbs-ai-server` and integrate advanced AI models like Llama 3.1 and Stable Diffusion 3 on your local machine, follow the steps outlined below.
294
-
295
- ### Prerequisites
296
-
297
- Ensure you have the following prerequisites installed and configured:
298
-
299
- - Ubuntu 22.04 or a compatible Linux distribution.
300
- - Python 3.10 and virtual environment tools.
301
- - Node.js and npm.
302
- - NVIDIA or AMD GPU drivers, depending on your hardware.
303
-
304
- Please follow the installation on [GitHub](https://github.com/open-kbs/openkbs-ai-server).
305
-
306
- ### Step 1: Checkout, Build, and Run
307
-
308
- Clone the `openkbs-ai-server` repository and set up the environment:
309
-
310
- ```bash
311
- git clone git@github.com:open-kbs/openkbs-ai-server.git
312
- cd openkbs-ai-server/cluster
313
- npm i
314
- cd ..
315
- python -m venv .env
316
- source .env/bin/activate
317
- ```
318
-
319
- **IMPORTANT: SELECT THE CORRECT GPU INSTRUCTIONS BELOW. DO NOT EXECUTE BOTH.**
320
-
321
- #### **FOR AMD GPUS:**
322
-
323
- **ONLY FOLLOW THESE INSTRUCTIONS IF YOU HAVE AN AMD GPU.**
324
-
325
- Install necessary libraries and Python packages:
326
-
327
- ```bash
328
- sudo apt-get install -y libjpeg-dev libpng-dev
329
- pip install wheel setuptools
330
- pip install --pre torch torchvision torchaudio --index-url https://download.pytorch.org/whl/nightly/rocm6.1/
331
- pip install -r ./models/requirements_AMD.txt
332
- ```
333
-
334
- #### **FOR NVIDIA GPUS:**
335
-
336
- **ONLY FOLLOW THESE INSTRUCTIONS IF YOU HAVE AN NVIDIA GPU.**
337
-
338
- Install the required Python packages:
339
-
340
- ```bash
341
- pip install torch
342
- pip install -r ./models/requirements_NVIDIA.txt
343
- ```
344
-
345
- ### Step 2: Configure Hugging Face Authentication
346
-
347
- Log in to Hugging Face to access the AI models:
348
-
349
- ```bash
350
- huggingface-cli login
351
- ```
352
-
353
- Enter your Hugging Face token when prompted.
354
-
355
- ### Step 3: Install Global Node.js Packages
356
-
357
- Install global Node.js packages required for running the server:
358
-
359
- ```bash
360
- npm install -g pm2 nodemon react-scripts
361
- ```
362
-
363
- ### Step 4: Start the AI Server
364
-
365
- Launch the AI server using the provided script:
366
-
367
- ```bash
368
- ./start.sh
369
- ```
370
-
371
- This command will start both the frontend and backend services using pm2. Your default web browser should automatically open to [http://localhost:7080/register](http://localhost:7080/register), where you can register the admin account for the AI server.
372
-
373
- ### Step 5: Install AI Models
374
-
375
- In the AI server admin panel, search for and install the following models:
376
-
377
- - **Llama-3.1-8B-Instruct**: Ensure you have access to [Llama-3.1-8B-Instruct](https://huggingface.co/meta-llama/Llama-3.1-8B-Instruct) on Hugging Face.
378
- - **Stable Diffusion 3**: Ensure you have access to [Stable Diffusion 3](https://huggingface.co/stabilityai/stable-diffusion-3-medium) on Hugging Face.
379
-
380
- After installation, restart your chat server to apply the changes.
381
-
382
- ### Step 6: Integrate Stable Diffusion under Events actions, so that Llama can call it
383
-
384
- Go to your app root folder
385
- ```bash
386
- cd my-pc-agent
387
- ```
388
-
389
- Add to `./src/Events/actions.js`
390
- ```javascript
391
- [/\/?textToImage\("(.*)"\)/, async (match) => {
392
- try {
393
- const response = await axios.get(`http://localhost:8080/pipe/stabilityai--stable-diffusion-3-medium-diffusers--default?prompt=${encodeURIComponent(match[1])}`, {
394
- responseType: 'arraybuffer'
395
- });
396
-
397
- const base64Data = Buffer.from(response.data, 'binary').toString('base64');
398
- const contentType = response.headers['content-type'];
399
- const imageSrc = `data:${contentType};base64,${base64Data}`;
400
-
401
- return { type: 'SAVED_CHAT_IMAGE', imageSrc, ...meta };
402
- } catch (error) {
403
- console.error('Error fetching image:', error);
404
- throw error; // or handle the error as needed
405
- }
406
- }]
407
- ```
408
-
409
- Push the changes:
410
- ```bash
411
- openkbs push localstack
412
- ```
413
- ### Step 7: Test Llama agent
414
-
415
- Once the models are installed and the server is running, select `Llama-3.1-8B-Inst` in your Chat Models selection and type in the chat:
416
-
417
- ```
418
- Hey Llama, search Google for the latest AI news and wait, then generate news image. Finally, use a template function to create an HTML page hosted on the S3 bucket 'ai-news-openkbs'.
419
- ```
420
-
421
- ![ai1.png](examples%2Fcloud-master%2Fai1.png)
422
-
423
- ![llama-loaded.png](examples%2Fcloud-master%2Fllama-loaded.png)
424
-
425
- ![sd3-loaded.png](examples%2Fcloud-master%2Fsd3-loaded.png)
426
-
427
- ![ai2.png](examples%2Fcloud-master%2Fai2.png)
428
-
429
- ![ai3.png](examples%2Fcloud-master%2Fai3.png)
430
- Have fun!
431
-
432
- ---
433
-
434
141
  ## License
435
142
 
436
143
  This project is licensed under the MIT License. For more details, please refer to the [LICENSE](https://github.com/open-kbs/openkbs-chat/blob/main/LICENSE) file.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "openkbs",
3
- "version": "0.0.19",
3
+ "version": "0.0.21",
4
4
  "description": "OpenKBS - Command Line Interface",
5
5
  "main": "src/index.js",
6
6
  "scripts": {
package/src/index.js CHANGED
@@ -119,15 +119,15 @@ Examples:
119
119
  $ openkbs deploy contentRender
120
120
  `);
121
121
 
122
- program
123
- .command('sign')
124
- .description('Signs a transaction to request OpenKBS service')
125
- .requiredOption('-a, --toAccountId <toAccountId>', 'Receiver account ID')
126
- .option('-e, --expires <expiresInSeconds>', 'Expiration time in seconds', '60')
127
- .option('-m, --maxAmount <maxAmount>', 'Maximum authorized charge', '300000')
128
- .option('-r, --resourceId <resourceId>', 'Resource ID', 'credits')
129
- .option('-p, --payload <payload>', 'Payload')
130
- .action(signAction);
122
+ // program
123
+ // .command('sign')
124
+ // .description('Signs a transaction to request OpenKBS service')
125
+ // .requiredOption('-a, --toAccountId <toAccountId>', 'Receiver account ID')
126
+ // .option('-e, --expires <expiresInSeconds>', 'Expiration time in seconds', '60')
127
+ // .option('-m, --maxAmount <maxAmount>', 'Maximum authorized charge', '300000')
128
+ // .option('-r, --resourceId <resourceId>', 'Resource ID', 'credits')
129
+ // .option('-p, --payload <payload>', 'Payload')
130
+ // .action(signAction);
131
131
 
132
132
  // Set up the CLI program
133
133
  program
@@ -141,13 +141,4 @@ program
141
141
  .description('Log out from OpenKBS by deleting the locally stored session token.')
142
142
  .action(logoutAction);
143
143
 
144
- // program
145
- // .command('evolve <featureDescription>')
146
- // .description('Evolve the application by providing additional feature requirements before deployment.')
147
- // .action(evolveApplication)
148
- // .addHelpText('after', `
149
- // Examples:
150
- // $ openkbs evolve "Add water tracking feature"
151
- // `);
152
-
153
144
  program.parse(process.argv);
package/src/utils.js CHANGED
@@ -370,7 +370,7 @@ async function modifyKB(kbToken, kbData, prompt, files, options) {
370
370
 
371
371
  const sendMessage = async (message) => {
372
372
  return makePostRequest(url, {
373
- rootToken: kbToken,
373
+ token: kbToken,
374
374
  message: encrypt(message, key),
375
375
  chatId: createdChatId,
376
376
  encrypted: true,