@pool4t9/server 1.0.0

This diff shows the contents of a publicly available package version as released to one of the supported registries. It is provided for informational purposes only and reflects the package as it appears in the public registry.
package/README.md ADDED
@@ -0,0 +1,7 @@
+ # @pool4t9/server
+
+ AI chatbot server package for building intelligent chat interfaces.
+
+ ## Installation
+
+ npm install @pool4t9/server
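For orientation, here is a minimal quick-start against the API this version exports (initAI, registerQuery, query). The environment variables, schema, model name, and query handler below are illustrative assumptions rather than values shipped with the package, and note that dist/llm.js in this release returns a mock response instead of calling a real provider.

```ts
// Hypothetical quick start — env var names, schema, and handler are assumptions.
import { initAI, type AIConfig } from "@pool4t9/server";

const config: AIConfig = {
  llm: { provider: "openai", apiKey: process.env.OPENAI_API_KEY ?? "", model: "gpt-4o-mini" },
  db: { type: "postgres", url: process.env.DATABASE_URL ?? "" },
  schema: {
    orders: {
      description: "Customer orders",
      fields: { id: "uuid", status: "string", total: "number" },
    },
  },
};

async function main() {
  const ai = initAI(config);

  // Register a query handler the model is allowed to invoke.
  ai.registerQuery("getOrderStatus", async (args: any, db: any, ctx: any) => {
    // Look the order up with your own DB client here.
    return { orderId: args.orderId, status: "shipped" };
  });

  // In this published version llm.chat() returns a mock, so no real completion
  // is produced without a provider integration (see dist/llm.js below).
  const reply = await ai.query("Where is my order 123?", { userId: "u_1" });
  console.log(reply.message);
}

main().catch(console.error);
```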
package/dist/executor.d.ts ADDED
@@ -0,0 +1 @@
+ export declare function executeQuery(name: string, args: any, queries: any, db: any, ctx: any): Promise<any>;
package/dist/executor.js ADDED
@@ -0,0 +1,10 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.executeQuery = executeQuery;
+ async function executeQuery(name, args, queries, db, ctx) {
+     const query = queries[name];
+     if (!query) {
+         throw new Error(`Query "${name}" not registered`);
+     }
+     return query(args, db, ctx);
+ }
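executeQuery is a thin dispatcher: it looks a handler up by name in the registry and forwards (args, db, ctx) to it, throwing if the name was never registered. A standalone sketch of that contract, with made-up handler names and arguments:

```ts
import { executeQuery } from "@pool4t9/server";

// Hypothetical registry — each handler receives (args, db, ctx).
const queries = {
  listProducts: async (args: { limit?: number }, db: unknown, ctx: { userId?: string }) => {
    return [{ id: 1, name: "Keyboard" }].slice(0, args.limit ?? 10);
  },
};

// Dispatches to queries.listProducts; an unregistered name would throw
// `Query "<name>" not registered`.
void executeQuery("listProducts", { limit: 5 }, queries, /* db */ null, { userId: "u_1" })
  .then((rows) => console.log(rows));
```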
package/dist/index.d.ts ADDED
@@ -0,0 +1,5 @@
+ export { initAI } from "./initAI";
+ export { createLLM } from "./llm";
+ export { buildPrompt } from "./prompt";
+ export { executeQuery } from "./executor";
+ export type { LLMProvider, AIConfig, QueryContext } from "./type";
package/dist/index.js ADDED
@@ -0,0 +1,11 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.executeQuery = exports.buildPrompt = exports.createLLM = exports.initAI = void 0;
+ var initAI_1 = require("./initAI");
+ Object.defineProperty(exports, "initAI", { enumerable: true, get: function () { return initAI_1.initAI; } });
+ var llm_1 = require("./llm");
+ Object.defineProperty(exports, "createLLM", { enumerable: true, get: function () { return llm_1.createLLM; } });
+ var prompt_1 = require("./prompt");
+ Object.defineProperty(exports, "buildPrompt", { enumerable: true, get: function () { return prompt_1.buildPrompt; } });
+ var executor_1 = require("./executor");
+ Object.defineProperty(exports, "executeQuery", { enumerable: true, get: function () { return executor_1.executeQuery; } });
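The published code is CommonJS under dist/ and is exposed through the "exports" map in package.json, so both module systems should resolve the same entry point. A typical consumer import (esModuleInterop or a bundler assumed for the ESM form):

```ts
// ESM / TypeScript consumer of the single "." entry point.
import { initAI, createLLM, buildPrompt, executeQuery } from "@pool4t9/server";
import type { AIConfig, LLMProvider, QueryContext } from "@pool4t9/server";

// CommonJS equivalent:
// const { initAI } = require("@pool4t9/server");
```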
package/dist/initAI.d.ts ADDED
@@ -0,0 +1,6 @@
+ export declare function initAI(config: any): {
+     registerQuery(name: string, fn: Function): void;
+     query(message: string, ctx?: {}): Promise<{
+         message: any;
+     }>;
+ };
package/dist/initAI.js ADDED
@@ -0,0 +1,30 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.initAI = initAI;
+ const llm_1 = require("./llm");
+ const prompt_1 = require("./prompt");
+ const executor_1 = require("./executor");
+ function initAI(config) {
+     const llm = (0, llm_1.createLLM)(config.llm);
+     const queries = {};
+     return {
+         registerQuery(name, fn) {
+             queries[name] = fn;
+         },
+         async query(message, ctx = {}) {
+             const prompt = (0, prompt_1.buildPrompt)(config.schema);
+             const ai = await llm.chat({
+                 system: prompt,
+                 user: message,
+                 tools: Object.keys(queries)
+             });
+             if (ai === "NOT_SUPPORTED") {
+                 return { message: "Sorry, I can't help with that." };
+             }
+             const result = await (0, executor_1.executeQuery)(ai.name, ai.args, queries, config.db, ctx);
+             return {
+                 message: ai.toText(result)
+             };
+         }
+     };
+ }
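initAI's query() implies a contract for whatever llm.chat() resolves to: either the literal string "NOT_SUPPORTED" or an object carrying name, args, and a toText(result) formatter. The package does not export a type for this, so the shape below is inferred from the code above; the stand-in value is a test-oriented assumption.

```ts
// Shape inferred from initAI's query() — not exported by the package.
type ChatDecision =
  | "NOT_SUPPORTED"
  | {
      name: string;                          // registered query to execute
      args: Record<string, unknown>;         // arguments forwarded to the handler
      toText: (result: unknown) => string;   // renders the handler result as the reply text
    };

// Hand-rolled stand-in that always picks one query — handy in tests.
const fakeDecision: ChatDecision = {
  name: "listProducts",
  args: { limit: 3 },
  toText: (result) => `Here is what I found: ${JSON.stringify(result)}`,
};
```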
package/dist/llm.d.ts ADDED
@@ -0,0 +1,3 @@
+ export declare function createLLM(config: any): {
+     chat(payload: any): Promise<any>;
+ };
package/dist/llm.js ADDED
@@ -0,0 +1,21 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.createLLM = createLLM;
+ function createLLM(config) {
+     if (config.provider === "openai") {
+         return {
+             async chat(payload) {
+                 // call OpenAI here (pseudo)
+                 return payload.__mockResponse;
+             }
+         };
+     }
+     if (config.provider === "gemini") {
+         return {
+             async chat(payload) {
+                 return payload.__mockResponse;
+             }
+         };
+     }
+     throw new Error("Unsupported LLM provider");
+ }
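Both provider branches currently return payload.__mockResponse, so this version performs no network call. As an illustration only, a real "openai" branch matching the same chat(payload) contract might look like the sketch below; it assumes the official openai npm package (not a dependency of this package), a default model name, and that the model was instructed to reply with a {"name", "args"} JSON object or the string NOT_SUPPORTED.

```ts
import OpenAI from "openai"; // assumption: not a dependency of @pool4t9/server

// Drop-in for the object createLLM returns: { chat(payload) }.
function createOpenAILLM(config: { apiKey: string; model?: string }) {
  const client = new OpenAI({ apiKey: config.apiKey });
  return {
    async chat(payload: { system: string; user: string; tools: string[] }) {
      const completion = await client.chat.completions.create({
        model: config.model ?? "gpt-4o-mini", // assumed default
        messages: [
          { role: "system", content: payload.system },
          { role: "user", content: payload.user },
        ],
      });
      const text = completion.choices[0]?.message?.content ?? "NOT_SUPPORTED";
      if (text.trim() === "NOT_SUPPORTED") return "NOT_SUPPORTED";
      // Assumes the system prompt asked for {"name": "...", "args": {...}}.
      const parsed = JSON.parse(text);
      return { ...parsed, toText: (result: unknown) => JSON.stringify(result) };
    },
  };
}
```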
package/dist/prompt.d.ts ADDED
@@ -0,0 +1 @@
+ export declare function buildPrompt(schema: any): string;
package/dist/prompt.js ADDED
@@ -0,0 +1,16 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.buildPrompt = buildPrompt;
+ function buildPrompt(schema) {
+     return `
+ You are an AI assistant for an e-commerce application.
+
+ You can ONLY answer using the provided schema.
+ You MUST return a structured query name and arguments.
+
+ Schema:
+ ${JSON.stringify(schema, null, 2)}
+
+ If you cannot answer, say "NOT_SUPPORTED".
+ `;
+ }
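buildPrompt simply interpolates the JSON-serialized schema into a fixed system-prompt template (the wording is hard-coded to an e-commerce assistant). A quick illustration with a made-up schema:

```ts
import { buildPrompt } from "@pool4t9/server";

// Hypothetical schema — keys and field types are illustrative only.
const system = buildPrompt({
  products: { description: "Catalog items", fields: { id: "uuid", name: "string", price: "number" } },
});

// `system` now holds the fixed instructions followed by the pretty-printed
// schema and the NOT_SUPPORTED fallback instruction.
console.log(system);
```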
package/dist/type.d.ts ADDED
@@ -0,0 +1,19 @@
+ export type LLMProvider = "openai" | "gemini";
+ export type AIConfig = {
+     llm: {
+         provider: LLMProvider;
+         apiKey: string;
+         model?: string;
+     };
+     db: {
+         type: "postgres" | "mysql";
+         url: string;
+     };
+     schema: Record<string, {
+         description?: string;
+         fields: Record<string, string>;
+     }>;
+ };
+ export type QueryContext = {
+     userId?: string;
+ };
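AIConfig pins the provider to "openai" | "gemini" and the database to "postgres" | "mysql". A fully typed config object (all values below are placeholders) looks like:

```ts
import type { AIConfig } from "@pool4t9/server";

// Placeholder values — keys, model name, and connection string are illustrative only.
const config: AIConfig = {
  llm: { provider: "gemini", apiKey: "YOUR_API_KEY", model: "gemini-1.5-flash" },
  db: { type: "mysql", url: "mysql://user:pass@localhost:3306/shop" },
  schema: {
    customers: { description: "Registered users", fields: { id: "uuid", email: "string" } },
  },
};
```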
package/dist/type.js ADDED
@@ -0,0 +1,2 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
package/package.json ADDED
@@ -0,0 +1,38 @@
+ {
+   "name": "@pool4t9/server",
+   "version": "1.0.0",
+   "description": "AI chatbot server package for building intelligent chat interfaces",
+   "main": "dist/index.js",
+   "types": "dist/index.d.ts",
+   "exports": {
+     ".": {
+       "types": "./dist/index.d.ts",
+       "default": "./dist/index.js"
+     }
+   },
+   "files": [
+     "dist",
+     "README.md"
+   ],
+   "scripts": {
+     "build": "tsc",
+     "prepublishOnly": "npm run build"
+   },
+   "keywords": [
+     "ai",
+     "chatbot",
+     "llm",
+     "openai",
+     "gemini"
+   ],
+   "author": "pool4t7 <gulshan.gupta@proton.me>",
+   "license": "MIT",
+   "repository": {
+     "type": "git",
+     "url": "git+https://github.com/pool4t7/ai-chatbot.git",
+     "directory": "packages/server"
+   },
+   "devDependencies": {
+     "typescript": "^5.9.3"
+   }
+ }