@derogab/llm-proxy 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1 @@
1
+ * @derogab
@@ -0,0 +1,27 @@
1
+ # Dependabot Configuration:
2
+ # https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
3
+
4
+ version: 2
5
+ updates:
6
+
7
+ # Maintain dependencies for GitHub Actions
8
+ - package-ecosystem: 'github-actions'
9
+ directory: '/'
10
+ target-branch: 'master'
11
+ schedule:
12
+ interval: 'monthly'
13
+ assignees:
14
+ - 'derogab'
15
+ labels:
16
+ - 'dependencies'
17
+
18
+ # Maintain dependencies for NPM
19
+ - package-ecosystem: 'npm'
20
+ directory: '/'
21
+ target-branch: 'master'
22
+ schedule:
23
+ interval: 'monthly'
24
+ assignees:
25
+ - 'derogab'
26
+ labels:
27
+ - 'dependencies'
@@ -0,0 +1,54 @@
1
+ name: Publish Package to NPM
2
+
3
+ on:
4
+ release:
5
+ types: [published]
6
+ workflow_dispatch:
7
+
8
+ jobs:
9
+ build:
10
+ name: Publish Package
11
+ runs-on: ubuntu-latest
12
+ permissions:
13
+ contents: read
14
+ id-token: write
15
+ steps:
16
+ - name: Checkout
17
+ uses: actions/checkout@v4
18
+ - name: Setup Node
19
+ uses: actions/setup-node@v4
20
+ with:
21
+ node-version: '20.x'
22
+ cache: 'npm'
23
+ registry-url: 'https://registry.npmjs.org'
24
+ always-auth: true
25
+ - name: Install dependencies (clean)
26
+ run: npm ci
27
+ - name: Type check
28
+ run: npx tsc -p tsconfig.json --noEmit
29
+ - name: Run tests
30
+ run: npm test --if-present
31
+ - name: Build
32
+ run: |
33
+ if npm run | grep -q "build"; then
34
+ npm run build
35
+ else
36
+ # Fall back to a standard TS build if no script is defined
37
+ npx tsc -p tsconfig.json
38
+ fi
39
+ - name: Verify tag matches package.json version
40
+ run: |
41
+ PKG_VERSION="$(node -p "require('./package.json').version")"
42
+ TAG_VERSION="${GITHUB_REF_NAME#v}" # supports tags like v1.2.3
43
+ echo "package.json: $PKG_VERSION"
44
+ echo "release tag: $TAG_VERSION"
45
+ if [ "$PKG_VERSION" != "$TAG_VERSION" ]; then
46
+ echo "Release tag ($TAG_VERSION) does not match package.json version ($PKG_VERSION)."
47
+ exit 1
48
+ fi
49
+ - name: Show publish contents (dry run)
50
+ run: npm pack --dry-run
51
+ - name: Publish to npm (with provenance)
52
+ env:
53
+ NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
54
+ run: npm publish --provenance --access public
@@ -0,0 +1,26 @@
1
+ name: Create Release
2
+
3
+ on:
4
+ push:
5
+ # Publish `v1.2.3` tags as releases.
6
+ tags:
7
+ - v*
8
+
9
+ jobs:
10
+ # Release the TAG to GitHub.
11
+ release:
12
+ name: Release pushed tag
13
+ if: startsWith(github.ref, 'refs/tags/')
14
+ permissions:
15
+ contents: write
16
+ runs-on: ubuntu-latest
17
+ steps:
18
+ - name: Create release
19
+ env:
20
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
21
+ tag: ${{ github.ref_name }}
22
+ run: |
23
+ gh release create "$tag" \
24
+ --repo="$GITHUB_REPOSITORY" \
25
+ --title="v${tag#v}" \
26
+ --generate-notes
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025 Gabriele De Rosa
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,2 @@
1
+ # llm-proxy
2
+ Simple LLM Proxy for seamless API integration
@@ -0,0 +1,15 @@
1
+ import type { ChatCompletionMessageParam } from 'openai/resources';
2
+ import type { Message } from 'ollama';
3
+ export type CloudflareMessage = {
4
+ role: string;
5
+ content: string;
6
+ };
7
+ export type MessageInputParam = ChatCompletionMessageParam | Message | CloudflareMessage;
8
+ /**
9
+ * Generate a response using an LLM.
10
+ *
11
+ * @param messages the messages to be sent to the LLM.
12
+ * @returns the response string.
13
+ */
14
+ export declare function generate(messages: MessageInputParam[]): Promise<MessageInputParam>;
15
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAOA,OAAO,KAAK,EAAE,0BAA0B,EAAE,MAAM,kBAAkB,CAAC;AACnE,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,QAAQ,CAAC;AAEtC,MAAM,MAAM,iBAAiB,GAAG;IAC9B,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,CAAC;CACjB,CAAC;AAEF,MAAM,MAAM,iBAAiB,GAAG,0BAA0B,GAAG,OAAO,GAAG,iBAAiB,CAAC;AAoEzF;;;;;GAKG;AACH,wBAAsB,QAAQ,CAAC,QAAQ,EAAE,iBAAiB,EAAE,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAkBxF"}
package/dist/index.js ADDED
@@ -0,0 +1,93 @@
1
+ // Dependencies.
2
+ import axios from 'axios';
3
+ import * as dotenv from 'dotenv';
4
+ import { Ollama } from 'ollama';
5
+ import OpenAI from 'openai';
6
+ // Configs.
7
+ dotenv.config();
8
+ /**
9
+ * Generate a response from the OpenAI API.
10
+ *
11
+ * @param messages the messages to be sent to the OpenAI API.
12
+ * @returns the response string from the OpenAI API.
13
+ */
14
+ async function generate_openai(messages) {
15
+ // Create a new instance of the OpenAI class.
16
+ const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
17
+ // Call the OpenAI API.
18
+ const chatCompletion = await openai.chat.completions.create({
19
+ messages: messages,
20
+ model: process.env.OPENAI_MODEL || 'gpt-4o-mini',
21
+ });
22
+ // Return the response.
23
+ return chatCompletion?.choices[0]?.message;
24
+ }
25
+ /**
26
+ * Generate a response using Ollama Local API.
27
+ *
28
+ * @param messages the messages to be sent to Ollama.
29
+ * @returns the response string.
30
+ */
31
+ async function generate_ollama(messages) {
32
+ // Create a new instance of the Ollama class.
33
+ const ollama = new Ollama({ host: process.env.OLLAMA_URI || 'http://localhost:11434' });
34
+ // Call the Ollama API.
35
+ const response = await ollama.chat({
36
+ model: process.env.OLLAMA_MODEL || 'llama3.1',
37
+ messages: messages,
38
+ });
39
+ // Return the response.
40
+ return response['message'];
41
+ }
42
+ /**
43
+ * Generate a response using Cloudflare AI API.
44
+ *
45
+ * @param messages the messages to be sent to Cloudflare AI.
46
+ * @returns the response string.
47
+ */
48
+ async function generate_cloudflare(messages) {
49
+ // Generate API URL based on the environment variables.
50
+ const model_url = 'https://api.cloudflare.com/client/v4/accounts/' + process.env.CLOUDFLARE_ACCOUNT_ID + '/ai/run/' + process.env.CLOUDFLARE_MODEL;
51
+ // Call the Cloudflare AI API.
52
+ const response = await axios({
53
+ method: 'post',
54
+ url: model_url,
55
+ headers: {
56
+ 'Authorization': 'Bearer ' + process.env.CLOUDFLARE_AUTH_KEY,
57
+ 'Content-Type': 'application/json',
58
+ },
59
+ data: {
60
+ messages: messages,
61
+ },
62
+ });
63
+ // Extract the response message.
64
+ const msg = response.data.success ? response.data.result.response : '';
65
+ // Return the response.
66
+ return { role: 'assistant', content: msg };
67
+ }
68
+ /**
69
+ * Generate a response using an LLM.
70
+ *
71
+ * @param messages the messages to be sent to the LLM.
72
+ * @returns the response string.
73
+ */
74
+ export async function generate(messages) {
75
+ // Check what LLM to use, based on the environment variables.
76
+ if (process.env.OPENAI_API_KEY) {
77
+ // If openai key is available, use openai.
78
+ return await generate_openai(messages);
79
+ }
80
+ else if (process.env.CLOUDFLARE_ACCOUNT_ID && process.env.CLOUDFLARE_AUTH_KEY && process.env.CLOUDFLARE_MODEL) {
81
+ // If cloudflare keys are available, use cloudflare.
82
+ return await generate_cloudflare(messages);
83
+ }
84
+ else if (process.env.OLLAMA_URI) {
85
+ // If ollama is available, use ollama.
86
+ return await generate_ollama(messages);
87
+ }
88
+ else {
89
+ // Throw an error if no LLM is available.
90
+ throw new Error('No available LLM found.');
91
+ }
92
+ }
93
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,gBAAgB;AAChB,OAAO,KAAK,MAAM,OAAO,CAAC;AAC1B,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AACjC,OAAO,EAAE,MAAM,EAAE,MAAM,QAAQ,CAAC;AAChC,OAAO,MAAM,MAAM,QAAQ,CAAC;AAa5B,WAAW;AACX,MAAM,CAAC,MAAM,EAAE,CAAC;AAEhB;;;;;GAKG;AACH,KAAK,UAAU,eAAe,CAAC,QAAsC;IACnE,6CAA6C;IAC7C,MAAM,MAAM,GAAG,IAAI,MAAM,CAAC,EAAE,MAAM,EAAE,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,CAAC,CAAC;IAClE,uBAAuB;IACvB,MAAM,cAAc,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC;QAC1D,QAAQ,EAAE,QAAQ;QAClB,KAAK,EAAE,OAAO,CAAC,GAAG,CAAC,YAAY,IAAI,aAAa;KACjD,CAAC,CAAC;IACH,uBAAuB;IACvB,OAAO,cAAc,EAAE,OAAO,CAAC,CAAC,CAAC,EAAE,OAAqC,CAAC;AAC3E,CAAC;AAED;;;;;GAKG;AACH,KAAK,UAAU,eAAe,CAAC,QAAmB;IAChD,6CAA6C;IAC7C,MAAM,MAAM,GAAG,IAAI,MAAM,CAAC,EAAE,IAAI,EAAE,OAAO,CAAC,GAAG,CAAC,UAAU,IAAI,wBAAwB,EAAE,CAAC,CAAC;IACxF,uBAAuB;IACvB,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC;QACjC,KAAK,EAAE,OAAO,CAAC,GAAG,CAAC,YAAY,IAAI,UAAU;QAC7C,QAAQ,EAAE,QAAQ;KACnB,CAAC,CAAC;IACH,uBAAuB;IACvB,OAAO,QAAQ,CAAC,SAAS,CAAC,CAAC;AAC7B,CAAC;AAED;;;;;GAKG;AACH,KAAK,UAAU,mBAAmB,CAAC,QAA6B;IAC9D,uDAAuD;IACvD,MAAM,SAAS,GAAG,gDAAgD,GAAG,OAAO,CAAC,GAAG,CAAC,qBAAqB,GAAG,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC;IACnJ,8BAA8B;IAC9B,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC;QAC3B,MAAM,EAAE,MAAM;QACd,GAAG,EAAE,SAAS;QACd,OAAO,EAAE;YACP,eAAe,EAAE,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,mBAAmB;YAC5D,cAAc,EAAG,kBAAkB;SACpC;QACD,IAAI,EAAE;YACJ,QAAQ,EAAE,QAAQ;SACnB;KACF,CAAC,CAAC;IACH,gCAAgC;IAChC,MAAM,GAAG,GAAG,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC;IACvE,uBAAuB;IACvB,OAAO,EAAE,IAAI,EAAE,WAAW,EAAE,OAAO,EAAE,GAAG,EAAE,CAAC;AAC7C,CAAC;AAED;;;;;GAKG;AACH,MAAM,CAAC,KAAK,UAAU,QAAQ,CAAC,QAA6B;IAC1D,6DAA6D;IAC7D,IAAI,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,CAAC;QAC/B,0CAA0C;QAC1C,OAAO,MAAM,eAAe,CAAC,QAAwC,CAAC,CAAC;IAEzE,CAAC;SAAM,IAAI,OAAO,CAAC,GAAG,CAAC,qBAAqB,IAAI,OAAO,CAAC,GAAG,CAAC,mBAAmB,IAAI,OAAO,CAAC,GAAG,CAAC,gBAAgB,EAAE,CAAC;QAChH,oDAAoD;QACpD,OAAO,
MAAM,mBAAmB,CAAC,QAA+B,CAAC,CAAC;IAEpE,CAAC;SAAM,IAAI,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,CAAC;QAClC,sCAAsC;QACtC,OAAO,MAAM,eAAe,CAAC,QAAqB,CAAC,CAAC;IAEtD,CAAC;SAAM,CAAC;QACN,yCAAyC;QACzC,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;IAC7C,CAAC;AACH,CAAC"}
package/package.json ADDED
@@ -0,0 +1,32 @@
1
+ {
2
+ "name": "@derogab/llm-proxy",
3
+ "description": "Simple LLM Proxy for seamless API integration",
4
+ "version": "0.1.0",
5
+ "author": "derogab",
6
+ "license": "MIT",
7
+ "repository": {
8
+ "type": "git",
9
+ "url": "https://github.com/derogab/llm-proxy.git"
10
+ },
11
+ "main": "dist/index.js",
12
+ "types": "dist/index.d.ts",
13
+ "type": "module",
14
+ "scripts": {
15
+ "build": "tsc"
16
+ },
17
+ "keywords": [
18
+ "LLM",
19
+ "proxy",
20
+ "gateway"
21
+ ],
22
+ "devDependencies": {
23
+ "@types/node": "24.3.0",
24
+ "typescript": "5.9.2"
25
+ },
26
+ "dependencies": {
27
+ "axios": "1.11.0",
28
+ "dotenv": "17.2.1",
29
+ "ollama": "0.5.17",
30
+ "openai": "5.16.0"
31
+ }
32
+ }
package/src/index.ts ADDED
@@ -0,0 +1,108 @@
1
+ // Dependencies.
2
+ import axios from 'axios';
3
+ import * as dotenv from 'dotenv';
4
+ import { Ollama } from 'ollama';
5
+ import OpenAI from 'openai';
6
+
7
+ // Types.
8
+ import type { ChatCompletionMessageParam } from 'openai/resources';
9
+ import type { Message } from 'ollama';
10
+
11
+ export type CloudflareMessage = {
12
+ role: string;
13
+ content: string;
14
+ };
15
+
16
+ export type MessageInputParam = ChatCompletionMessageParam | Message | CloudflareMessage;
17
+
18
+ // Configs.
19
+ dotenv.config();
20
+
21
+ /**
22
+ * Generate a response from the OpenAI API.
23
+ *
24
+ * @param messages the messages to be sent to the OpenAI API.
25
+ * @returns the response string from the OpenAI API.
26
+ */
27
+ async function generate_openai(messages: ChatCompletionMessageParam[]): Promise<ChatCompletionMessageParam> {
28
+ // Create a new instance of the OpenAI class.
29
+ const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
30
+ // Call the OpenAI API.
31
+ const chatCompletion = await openai.chat.completions.create({
32
+ messages: messages,
33
+ model: process.env.OPENAI_MODEL || 'gpt-4o-mini',
34
+ });
35
+ // Return the response.
36
+ return chatCompletion?.choices[0]?.message as ChatCompletionMessageParam;
37
+ }
38
+
39
+ /**
40
+ * Generate a response using Ollama Local API.
41
+ *
42
+ * @param messages the messages to be sent to Ollama.
43
+ * @returns the response string.
44
+ */
45
+ async function generate_ollama(messages: Message[]): Promise<Message> {
46
+ // Create a new instance of the OpenAI class.
47
+ const ollama = new Ollama({ host: process.env.OLLAMA_URI || 'http://localhost:11434' });
48
+ // Call the Ollama API.
49
+ const response = await ollama.chat({
50
+ model: process.env.OLLAMA_MODEL || 'llama3.1',
51
+ messages: messages,
52
+ });
53
+ // Return the response.
54
+ return response['message'];
55
+ }
56
+
57
+ /**
58
+ * Generate a response using Cloudflare AI API.
59
+ *
60
+ * @param messages the messages to be sent to Cloudflare AI.
61
+ * @returns the response string.
62
+ */
63
+ async function generate_cloudflare(messages: CloudflareMessage[]): Promise<CloudflareMessage> {
64
+ // Generate API URL based on the environment variables.
65
+ const model_url = 'https://api.cloudflare.com/client/v4/accounts/' + process.env.CLOUDFLARE_ACCOUNT_ID + '/ai/run/' + process.env.CLOUDFLARE_MODEL;
66
+ // Call the Cloudflare AI API.
67
+ const response = await axios({
68
+ method: 'post',
69
+ url: model_url,
70
+ headers: {
71
+ 'Authorization': 'Bearer ' + process.env.CLOUDFLARE_AUTH_KEY,
72
+ 'Content-Type' : 'application/json',
73
+ },
74
+ data: {
75
+ messages: messages,
76
+ },
77
+ });
78
+ // Extract the response message.
79
+ const msg = response.data.success ? response.data.result.response : '';
80
+ // Return the response.
81
+ return { role: 'assistant', content: msg };
82
+ }
83
+
84
+ /**
85
+ * Generate a response using an LLM.
86
+ *
87
+ * @param messages the messages to be sent to the LLM.
88
+ * @returns the response string.
89
+ */
90
+ export async function generate(messages: MessageInputParam[]): Promise<MessageInputParam> {
91
+ // Check what LLM to use, based on the environment variables.
92
+ if (process.env.OPENAI_API_KEY) {
93
+ // If openai key is available, use openai.
94
+ return await generate_openai(messages as ChatCompletionMessageParam[]);
95
+
96
+ } else if (process.env.CLOUDFLARE_ACCOUNT_ID && process.env.CLOUDFLARE_AUTH_KEY && process.env.CLOUDFLARE_MODEL) {
97
+ // If cloudflare keys are available, use cloudflare.
98
+ return await generate_cloudflare(messages as CloudflareMessage[]);
99
+
100
+ } else if (process.env.OLLAMA_URI) {
101
+ // If ollama is available, use ollama.
102
+ return await generate_ollama(messages as Message[]);
103
+
104
+ } else {
105
+ // Throw an error if no LLM is available.
106
+ throw new Error('No available LLM found.');
107
+ }
108
+ }
package/tsconfig.json ADDED
@@ -0,0 +1,44 @@
1
+ {
2
+ // Visit https://aka.ms/tsconfig to read more about this file
3
+ "compilerOptions": {
4
+ // File Layout
5
+ "rootDir": "./src",
6
+ "outDir": "./dist",
7
+
8
+ // Environment Settings
9
+ // See also https://aka.ms/tsconfig/module
10
+ "module": "nodenext",
11
+ "target": "esnext",
12
+ "types": [],
13
+ // For nodejs:
14
+ // "lib": ["esnext"],
15
+ // "types": ["node"],
16
+ // and npm install -D @types/node
17
+
18
+ // Other Outputs
19
+ "sourceMap": true,
20
+ "declaration": true,
21
+ "declarationMap": true,
22
+
23
+ // Stricter Typechecking Options
24
+ "noUncheckedIndexedAccess": true,
25
+ "exactOptionalPropertyTypes": true,
26
+
27
+ // Style Options
28
+ // "noImplicitReturns": true,
29
+ // "noImplicitOverride": true,
30
+ // "noUnusedLocals": true,
31
+ // "noUnusedParameters": true,
32
+ // "noFallthroughCasesInSwitch": true,
33
+ // "noPropertyAccessFromIndexSignature": true,
34
+
35
+ // Recommended Options
36
+ "strict": true,
37
+ "jsx": "react-jsx",
38
+ "verbatimModuleSyntax": true,
39
+ "isolatedModules": true,
40
+ "noUncheckedSideEffectImports": true,
41
+ "moduleDetection": "force",
42
+ "skipLibCheck": true,
43
+ }
44
+ }