@lockllm/sdk 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +81 -0
- package/CODE_OF_CONDUCT.md +130 -0
- package/CONTRIBUTING.md +259 -0
- package/LICENSE +21 -0
- package/README.md +928 -0
- package/SECURITY.md +261 -0
- package/dist/client.d.ts +39 -0
- package/dist/client.d.ts.map +1 -0
- package/dist/client.js +65 -0
- package/dist/client.js.map +1 -0
- package/dist/client.mjs +61 -0
- package/dist/errors.d.ts +60 -0
- package/dist/errors.d.ts.map +1 -0
- package/dist/errors.js +175 -0
- package/dist/errors.js.map +1 -0
- package/dist/errors.mjs +164 -0
- package/dist/index.d.ts +17 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +49 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +17 -0
- package/dist/scan.d.ts +32 -0
- package/dist/scan.d.ts.map +1 -0
- package/dist/scan.js +40 -0
- package/dist/scan.js.map +1 -0
- package/dist/scan.mjs +36 -0
- package/dist/types/common.d.ts +31 -0
- package/dist/types/common.d.ts.map +1 -0
- package/dist/types/common.js +6 -0
- package/dist/types/common.js.map +1 -0
- package/dist/types/common.mjs +5 -0
- package/dist/types/errors.d.ts +22 -0
- package/dist/types/errors.d.ts.map +1 -0
- package/dist/types/errors.js +6 -0
- package/dist/types/errors.js.map +1 -0
- package/dist/types/errors.mjs +5 -0
- package/dist/types/providers.d.ts +24 -0
- package/dist/types/providers.d.ts.map +1 -0
- package/dist/types/providers.js +26 -0
- package/dist/types/providers.js.map +1 -0
- package/dist/types/providers.mjs +23 -0
- package/dist/types/scan.d.ts +36 -0
- package/dist/types/scan.d.ts.map +1 -0
- package/dist/types/scan.js +6 -0
- package/dist/types/scan.js.map +1 -0
- package/dist/types/scan.mjs +5 -0
- package/dist/utils.d.ts +84 -0
- package/dist/utils.d.ts.map +1 -0
- package/dist/utils.js +225 -0
- package/dist/utils.js.map +1 -0
- package/dist/utils.mjs +215 -0
- package/dist/wrappers/anthropic-wrapper.d.ts +72 -0
- package/dist/wrappers/anthropic-wrapper.d.ts.map +1 -0
- package/dist/wrappers/anthropic-wrapper.js +78 -0
- package/dist/wrappers/anthropic-wrapper.js.map +1 -0
- package/dist/wrappers/anthropic-wrapper.mjs +74 -0
- package/dist/wrappers/generic-wrapper.d.ts +180 -0
- package/dist/wrappers/generic-wrapper.d.ts.map +1 -0
- package/dist/wrappers/generic-wrapper.js +246 -0
- package/dist/wrappers/generic-wrapper.js.map +1 -0
- package/dist/wrappers/generic-wrapper.mjs +225 -0
- package/dist/wrappers/index.d.ts +27 -0
- package/dist/wrappers/index.d.ts.map +1 -0
- package/dist/wrappers/index.js +48 -0
- package/dist/wrappers/index.js.map +1 -0
- package/dist/wrappers/index.mjs +26 -0
- package/dist/wrappers/openai-wrapper.d.ts +70 -0
- package/dist/wrappers/openai-wrapper.d.ts.map +1 -0
- package/dist/wrappers/openai-wrapper.js +76 -0
- package/dist/wrappers/openai-wrapper.js.map +1 -0
- package/dist/wrappers/openai-wrapper.mjs +72 -0
- package/package.json +106 -0
|
@@ -0,0 +1,225 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Generic SDK wrapper for any provider
|
|
3
|
+
*
|
|
4
|
+
* This wrapper provides a generic way to create clients for any LLM provider
|
|
5
|
+
* by routing requests through the LockLLM proxy.
|
|
6
|
+
*
|
|
7
|
+
* @example
|
|
8
|
+
* ```typescript
|
|
9
|
+
* import { createClient } from '@lockllm/sdk/wrappers';
|
|
10
|
+
*
|
|
11
|
+
* // For providers with official SDKs
|
|
12
|
+
* const mistral = createClient('mistral', MistralClient, {
|
|
13
|
+
* apiKey: process.env.LOCKLLM_API_KEY
|
|
14
|
+
* });
|
|
15
|
+
*
|
|
16
|
+
* // For providers with OpenAI-compatible APIs
|
|
17
|
+
* const groq = createClient('groq', OpenAI, {
|
|
18
|
+
* apiKey: process.env.LOCKLLM_API_KEY
|
|
19
|
+
* });
|
|
20
|
+
* ```
|
|
21
|
+
*/
|
|
22
|
+
import { getProxyURL } from '../utils';
|
|
23
|
+
/**
 * Create a client for any provider using their official SDK.
 *
 * Generic factory: instantiates the supplied SDK client constructor, pointing
 * it at the LockLLM proxy endpoint for the named provider unless the caller
 * supplies an explicit baseURL.
 *
 * @param provider - The provider name (e.g., 'openai', 'anthropic', 'mistral')
 * @param ClientConstructor - The provider's SDK client constructor
 * @param config - Client options; `apiKey` and optional `baseURL` are
 *   extracted, everything else is forwarded to the constructor untouched
 * @returns Configured client instance
 *
 * @example
 * ```typescript
 * // Mistral AI
 * import MistralClient from '@mistralai/mistralai';
 * const mistral = createClient('mistral', MistralClient, {
 *   apiKey: process.env.LOCKLLM_API_KEY
 * });
 *
 * // Groq (OpenAI-compatible)
 * import OpenAI from 'openai';
 * const groq = createClient('groq', OpenAI, {
 *   apiKey: process.env.LOCKLLM_API_KEY
 * });
 *
 * // Cohere
 * import { CohereClient } from 'cohere-ai';
 * const cohere = createClient('cohere', CohereClient, {
 *   apiKey: process.env.LOCKLLM_API_KEY
 * });
 * ```
 */
export function createClient(provider, ClientConstructor, config) {
  const { apiKey, baseURL, ...forwarded } = config;
  // Fall back to the LockLLM proxy endpoint when no explicit base URL is given.
  const resolvedBaseURL = baseURL || getProxyURL(provider);
  const clientOptions = {
    apiKey,
    baseURL: resolvedBaseURL,
    ...forwarded,
  };
  return new ClientConstructor(clientOptions);
}
|
|
66
|
+
/**
 * Lazy-load the OpenAI SDK constructor.
 *
 * Resolution order handles every packaging shape the SDK has shipped with:
 * ESM default export, named `OpenAI` export, or the module object itself.
 *
 * NOTE(review): the default `require` only exists in the CommonJS build; in
 * this ESM (.mjs) build, calling this without an explicit `requireFn` throws
 * a ReferenceError — confirm ESM callers always inject a loader.
 *
 * @internal - exposed for testing purposes
 * @param requireFn - Module loader, injectable for tests (defaults to `require`)
 * @returns The OpenAI client constructor
 * @throws {Error} If the `openai` package cannot be loaded
 */
export function getGenericOpenAIConstructor(requireFn = require) {
    try {
        const openaiModule = requireFn('openai');
        return openaiModule.default || openaiModule.OpenAI || openaiModule;
    }
    catch (error) {
        // Preserve the underlying load failure instead of swallowing it.
        throw new Error('OpenAI SDK not found. Please install it with: npm install openai', { cause: error });
    }
}
|
|
79
|
+
/**
 * Helper to create OpenAI-compatible clients for providers.
 *
 * Many providers (Groq, DeepSeek, Perplexity, etc.) expose OpenAI-compatible
 * APIs, so the OpenAI SDK can talk to them through the LockLLM proxy.
 *
 * @param provider - The provider name
 * @param config - Configuration options
 * @returns OpenAI client configured for the provider
 *
 * @example
 * ```typescript
 * // Groq
 * const groq = createOpenAICompatible('groq', {
 *   apiKey: process.env.LOCKLLM_API_KEY
 * });
 *
 * // DeepSeek
 * const deepseek = createOpenAICompatible('deepseek', {
 *   apiKey: process.env.LOCKLLM_API_KEY
 * });
 *
 * // Use like OpenAI
 * const response = await groq.chat.completions.create({
 *   model: 'llama-3.1-70b-versatile',
 *   messages: [{ role: 'user', content: 'Hello!' }]
 * });
 * ```
 */
export function createOpenAICompatible(provider, config) {
    // Resolves the OpenAI SDK lazily; throws if it is not installed.
    return createClient(provider, getGenericOpenAIConstructor(), config);
}
|
|
112
|
+
/**
 * Pre-configured factory functions for specific providers.
 *
 * Each factory simply binds a provider name onto createOpenAICompatible;
 * they are generated from a single helper to keep them trivially consistent.
 */
const openAICompatibleFactory = (provider) => (config) => createOpenAICompatible(provider, config);
/** Create a Groq client (OpenAI-compatible) */
export const createGroq = openAICompatibleFactory('groq');
/** Create a DeepSeek client (OpenAI-compatible) */
export const createDeepSeek = openAICompatibleFactory('deepseek');
/** Create a Perplexity client (OpenAI-compatible) */
export const createPerplexity = openAICompatibleFactory('perplexity');
/** Create a Mistral AI client (OpenAI-compatible) */
export const createMistral = openAICompatibleFactory('mistral');
/** Create an OpenRouter client (OpenAI-compatible) */
export const createOpenRouter = openAICompatibleFactory('openrouter');
/** Create a Together AI client (OpenAI-compatible) */
export const createTogether = openAICompatibleFactory('together');
/** Create an xAI (Grok) client (OpenAI-compatible) */
export const createXAI = openAICompatibleFactory('xai');
/** Create a Fireworks AI client (OpenAI-compatible) */
export const createFireworks = openAICompatibleFactory('fireworks');
/** Create an Anyscale client (OpenAI-compatible) */
export const createAnyscale = openAICompatibleFactory('anyscale');
/** Create a Hugging Face client (OpenAI-compatible) */
export const createHuggingFace = openAICompatibleFactory('huggingface');
/**
 * Create a Google Gemini client
 *
 * Note: For Gemini, you should use the @google/generative-ai SDK
 * or an OpenAI-compatible wrapper if available.
 */
export const createGemini = openAICompatibleFactory('gemini');
|
|
184
|
+
/**
 * Get the Cohere SDK constructor.
 *
 * Resolution order handles the named `CohereClient` export or the module
 * object itself.
 *
 * NOTE(review): the default `require` only exists in the CommonJS build; in
 * this ESM (.mjs) build, calling this without an explicit `requireFn` throws
 * a ReferenceError — confirm ESM callers always inject a loader.
 *
 * @internal - exposed for testing purposes
 * @param requireFn - Module loader, injectable for tests (defaults to `require`)
 * @returns The Cohere client constructor
 * @throws {Error} If the `cohere-ai` package cannot be loaded
 */
export function getCohereConstructor(requireFn = require) {
    try {
        const cohereModule = requireFn('cohere-ai');
        return cohereModule.CohereClient || cohereModule;
    }
    catch (error) {
        // Preserve the underlying load failure instead of swallowing it.
        throw new Error('Cohere SDK not found. Please install it with: npm install cohere-ai', { cause: error });
    }
}
|
|
197
|
+
/**
 * Create a Cohere client
 *
 * Note: For Cohere, you should use the cohere-ai SDK
 */
export function createCohere(config) {
    // Resolves the cohere-ai SDK lazily; throws if it is not installed.
    return createClient('cohere', getCohereConstructor(), config);
}
|
|
207
|
+
/** Create an Azure OpenAI client */
export const createAzure = (config) => createOpenAICompatible('azure', config);
/** Create an AWS Bedrock client */
export const createBedrock = (config) => createOpenAICompatible('bedrock', config);
/** Create a Google Vertex AI client */
export const createVertexAI = (config) => createOpenAICompatible('vertex-ai', config);
|
|
225
|
+
//# sourceMappingURL=generic-wrapper.js.map
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* SDK Wrappers - Drop-in replacements for official SDKs
|
|
3
|
+
*
|
|
4
|
+
* @example
|
|
5
|
+
* ```typescript
|
|
6
|
+
* import { createOpenAI, createAnthropic, createGroq } from '@lockllm/sdk/wrappers';
|
|
7
|
+
*
|
|
8
|
+
* const openai = createOpenAI({
|
|
9
|
+
* apiKey: process.env.LOCKLLM_API_KEY
|
|
10
|
+
* });
|
|
11
|
+
*
|
|
12
|
+
* const anthropic = createAnthropic({
|
|
13
|
+
* apiKey: process.env.LOCKLLM_API_KEY
|
|
14
|
+
* });
|
|
15
|
+
*
|
|
16
|
+
* const groq = createGroq({
|
|
17
|
+
* apiKey: process.env.LOCKLLM_API_KEY
|
|
18
|
+
* });
|
|
19
|
+
* ```
|
|
20
|
+
*/
|
|
21
|
+
export { createOpenAI } from './openai-wrapper';
|
|
22
|
+
export type { CreateOpenAIConfig } from './openai-wrapper';
|
|
23
|
+
export { createAnthropic } from './anthropic-wrapper';
|
|
24
|
+
export type { CreateAnthropicConfig } from './anthropic-wrapper';
|
|
25
|
+
export { createClient, createOpenAICompatible, createGroq, createDeepSeek, createPerplexity, createMistral, createOpenRouter, createTogether, createXAI, createFireworks, createAnyscale, createHuggingFace, createGemini, createCohere, createAzure, createBedrock, createVertexAI, } from './generic-wrapper';
|
|
26
|
+
export type { GenericClientConfig } from './generic-wrapper';
|
|
27
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/wrappers/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;GAmBG;AAGH,OAAO,EAAE,YAAY,EAAE,MAAM,kBAAkB,CAAC;AAChD,YAAY,EAAE,kBAAkB,EAAE,MAAM,kBAAkB,CAAC;AAE3D,OAAO,EAAE,eAAe,EAAE,MAAM,qBAAqB,CAAC;AACtD,YAAY,EAAE,qBAAqB,EAAE,MAAM,qBAAqB,CAAC;AAGjE,OAAO,EACL,YAAY,EACZ,sBAAsB,EACtB,UAAU,EACV,cAAc,EACd,gBAAgB,EAChB,aAAa,EACb,gBAAgB,EAChB,cAAc,EACd,SAAS,EACT,eAAe,EACf,cAAc,EACd,iBAAiB,EACjB,YAAY,EACZ,YAAY,EACZ,WAAW,EACX,aAAa,EACb,cAAc,GACf,MAAM,mBAAmB,CAAC;AAC3B,YAAY,EAAE,mBAAmB,EAAE,MAAM,mBAAmB,CAAC"}
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* SDK Wrappers - Drop-in replacements for official SDKs
|
|
4
|
+
*
|
|
5
|
+
* @example
|
|
6
|
+
* ```typescript
|
|
7
|
+
* import { createOpenAI, createAnthropic, createGroq } from '@lockllm/sdk/wrappers';
|
|
8
|
+
*
|
|
9
|
+
* const openai = createOpenAI({
|
|
10
|
+
* apiKey: process.env.LOCKLLM_API_KEY
|
|
11
|
+
* });
|
|
12
|
+
*
|
|
13
|
+
* const anthropic = createAnthropic({
|
|
14
|
+
* apiKey: process.env.LOCKLLM_API_KEY
|
|
15
|
+
* });
|
|
16
|
+
*
|
|
17
|
+
* const groq = createGroq({
|
|
18
|
+
* apiKey: process.env.LOCKLLM_API_KEY
|
|
19
|
+
* });
|
|
20
|
+
* ```
|
|
21
|
+
*/
|
|
22
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
23
|
+
exports.createVertexAI = exports.createBedrock = exports.createAzure = exports.createCohere = exports.createGemini = exports.createHuggingFace = exports.createAnyscale = exports.createFireworks = exports.createXAI = exports.createTogether = exports.createOpenRouter = exports.createMistral = exports.createPerplexity = exports.createDeepSeek = exports.createGroq = exports.createOpenAICompatible = exports.createClient = exports.createAnthropic = exports.createOpenAI = void 0;
|
|
24
|
+
// Provider-specific wrappers
|
|
25
|
+
var openai_wrapper_1 = require("./openai-wrapper");
|
|
26
|
+
Object.defineProperty(exports, "createOpenAI", { enumerable: true, get: function () { return openai_wrapper_1.createOpenAI; } });
|
|
27
|
+
var anthropic_wrapper_1 = require("./anthropic-wrapper");
|
|
28
|
+
Object.defineProperty(exports, "createAnthropic", { enumerable: true, get: function () { return anthropic_wrapper_1.createAnthropic; } });
|
|
29
|
+
// Generic wrapper and utilities
|
|
30
|
+
var generic_wrapper_1 = require("./generic-wrapper");
|
|
31
|
+
Object.defineProperty(exports, "createClient", { enumerable: true, get: function () { return generic_wrapper_1.createClient; } });
|
|
32
|
+
Object.defineProperty(exports, "createOpenAICompatible", { enumerable: true, get: function () { return generic_wrapper_1.createOpenAICompatible; } });
|
|
33
|
+
Object.defineProperty(exports, "createGroq", { enumerable: true, get: function () { return generic_wrapper_1.createGroq; } });
|
|
34
|
+
Object.defineProperty(exports, "createDeepSeek", { enumerable: true, get: function () { return generic_wrapper_1.createDeepSeek; } });
|
|
35
|
+
Object.defineProperty(exports, "createPerplexity", { enumerable: true, get: function () { return generic_wrapper_1.createPerplexity; } });
|
|
36
|
+
Object.defineProperty(exports, "createMistral", { enumerable: true, get: function () { return generic_wrapper_1.createMistral; } });
|
|
37
|
+
Object.defineProperty(exports, "createOpenRouter", { enumerable: true, get: function () { return generic_wrapper_1.createOpenRouter; } });
|
|
38
|
+
Object.defineProperty(exports, "createTogether", { enumerable: true, get: function () { return generic_wrapper_1.createTogether; } });
|
|
39
|
+
Object.defineProperty(exports, "createXAI", { enumerable: true, get: function () { return generic_wrapper_1.createXAI; } });
|
|
40
|
+
Object.defineProperty(exports, "createFireworks", { enumerable: true, get: function () { return generic_wrapper_1.createFireworks; } });
|
|
41
|
+
Object.defineProperty(exports, "createAnyscale", { enumerable: true, get: function () { return generic_wrapper_1.createAnyscale; } });
|
|
42
|
+
Object.defineProperty(exports, "createHuggingFace", { enumerable: true, get: function () { return generic_wrapper_1.createHuggingFace; } });
|
|
43
|
+
Object.defineProperty(exports, "createGemini", { enumerable: true, get: function () { return generic_wrapper_1.createGemini; } });
|
|
44
|
+
Object.defineProperty(exports, "createCohere", { enumerable: true, get: function () { return generic_wrapper_1.createCohere; } });
|
|
45
|
+
Object.defineProperty(exports, "createAzure", { enumerable: true, get: function () { return generic_wrapper_1.createAzure; } });
|
|
46
|
+
Object.defineProperty(exports, "createBedrock", { enumerable: true, get: function () { return generic_wrapper_1.createBedrock; } });
|
|
47
|
+
Object.defineProperty(exports, "createVertexAI", { enumerable: true, get: function () { return generic_wrapper_1.createVertexAI; } });
|
|
48
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/wrappers/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;GAmBG;AAEH,6BAA6B;AAC7B,OAAO,EAAE,YAAY,EAAE,MAAM,kBAAkB,CAAC;AAGhD,OAAO,EAAE,eAAe,EAAE,MAAM,qBAAqB,CAAC;AAGtD,gCAAgC;AAChC,OAAO,EACL,YAAY,EACZ,sBAAsB,EACtB,UAAU,EACV,cAAc,EACd,gBAAgB,EAChB,aAAa,EACb,gBAAgB,EAChB,cAAc,EACd,SAAS,EACT,eAAe,EACf,cAAc,EACd,iBAAiB,EACjB,YAAY,EACZ,YAAY,EACZ,WAAW,EACX,aAAa,EACb,cAAc,GACf,MAAM,mBAAmB,CAAC"}
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* SDK Wrappers - Drop-in replacements for official SDKs
|
|
3
|
+
*
|
|
4
|
+
* @example
|
|
5
|
+
* ```typescript
|
|
6
|
+
* import { createOpenAI, createAnthropic, createGroq } from '@lockllm/sdk/wrappers';
|
|
7
|
+
*
|
|
8
|
+
* const openai = createOpenAI({
|
|
9
|
+
* apiKey: process.env.LOCKLLM_API_KEY
|
|
10
|
+
* });
|
|
11
|
+
*
|
|
12
|
+
* const anthropic = createAnthropic({
|
|
13
|
+
* apiKey: process.env.LOCKLLM_API_KEY
|
|
14
|
+
* });
|
|
15
|
+
*
|
|
16
|
+
* const groq = createGroq({
|
|
17
|
+
* apiKey: process.env.LOCKLLM_API_KEY
|
|
18
|
+
* });
|
|
19
|
+
* ```
|
|
20
|
+
*/
|
|
21
|
+
// Provider-specific wrappers
|
|
22
|
+
export { createOpenAI } from './openai-wrapper';
|
|
23
|
+
export { createAnthropic } from './anthropic-wrapper';
|
|
24
|
+
// Generic wrapper and utilities
|
|
25
|
+
export { createClient, createOpenAICompatible, createGroq, createDeepSeek, createPerplexity, createMistral, createOpenRouter, createTogether, createXAI, createFireworks, createAnyscale, createHuggingFace, createGemini, createCohere, createAzure, createBedrock, createVertexAI, } from './generic-wrapper';
|
|
26
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* OpenAI SDK wrapper - Drop-in replacement
|
|
3
|
+
*
|
|
4
|
+
* This wrapper allows you to use the official OpenAI SDK with LockLLM protection
|
|
5
|
+
* by simply changing how you initialize the client.
|
|
6
|
+
*
|
|
7
|
+
* @example
|
|
8
|
+
* ```typescript
|
|
9
|
+
* // Replace this:
|
|
10
|
+
* // import OpenAI from 'openai';
|
|
11
|
+
* // const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
|
|
12
|
+
*
|
|
13
|
+
* // With this:
|
|
14
|
+
* import { createOpenAI } from '@lockllm/sdk/wrappers';
|
|
15
|
+
* const openai = createOpenAI({
|
|
16
|
+
* apiKey: process.env.LOCKLLM_API_KEY
|
|
17
|
+
* });
|
|
18
|
+
*
|
|
19
|
+
* // Everything else stays the same!
|
|
20
|
+
* const response = await openai.chat.completions.create({
|
|
21
|
+
* model: "gpt-4",
|
|
22
|
+
* messages: [{ role: "user", content: "Hello!" }]
|
|
23
|
+
* });
|
|
24
|
+
* ```
|
|
25
|
+
*/
|
|
26
|
+
export interface CreateOpenAIConfig {
|
|
27
|
+
/**
|
|
28
|
+
* Your LockLLM API key
|
|
29
|
+
* Get it from: https://www.lockllm.com/dashboard
|
|
30
|
+
*/
|
|
31
|
+
apiKey: string;
|
|
32
|
+
/**
|
|
33
|
+
* Base URL for LockLLM proxy (default: https://api.lockllm.com/v1/proxy/openai)
|
|
34
|
+
* Override this only if you're using a custom LockLLM endpoint
|
|
35
|
+
*/
|
|
36
|
+
baseURL?: string;
|
|
37
|
+
/**
|
|
38
|
+
* Other OpenAI client options
|
|
39
|
+
*/
|
|
40
|
+
[key: string]: any;
|
|
41
|
+
}
|
|
42
|
+
/**
|
|
43
|
+
* Create an OpenAI client that routes through LockLLM proxy
|
|
44
|
+
*
|
|
45
|
+
* All requests are automatically scanned for prompt injection before being
|
|
46
|
+
* forwarded to OpenAI. Your OpenAI API key should be configured in the
|
|
47
|
+
* LockLLM dashboard at https://www.lockllm.com/dashboard
|
|
48
|
+
*
|
|
49
|
+
* @param config - Configuration options
|
|
50
|
+
* @returns OpenAI client instance
|
|
51
|
+
*
|
|
52
|
+
* @example
|
|
53
|
+
* ```typescript
|
|
54
|
+
* const openai = createOpenAI({
|
|
55
|
+
* apiKey: process.env.LOCKLLM_API_KEY
|
|
56
|
+
* });
|
|
57
|
+
*
|
|
58
|
+
* const response = await openai.chat.completions.create({
|
|
59
|
+
* model: "gpt-4",
|
|
60
|
+
* messages: [{ role: "user", content: "Hello!" }]
|
|
61
|
+
* });
|
|
62
|
+
* ```
|
|
63
|
+
*/
|
|
64
|
+
/**
|
|
65
|
+
* Lazy-load OpenAI SDK constructor
|
|
66
|
+
* @internal - exposed for testing purposes
|
|
67
|
+
*/
|
|
68
|
+
export declare function getOpenAIConstructor(requireFn?: NodeJS.Require): any;
|
|
69
|
+
export declare function createOpenAI(config: CreateOpenAIConfig): any;
|
|
70
|
+
//# sourceMappingURL=openai-wrapper.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"openai-wrapper.d.ts","sourceRoot":"","sources":["../../src/wrappers/openai-wrapper.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;GAwBG;AAEH,MAAM,WAAW,kBAAkB;IACjC;;;OAGG;IACH,MAAM,EAAE,MAAM,CAAC;IAEf;;;OAGG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IAEjB;;OAEG;IACH,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;CACpB;AAED;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH;;;GAGG;AACH,wBAAgB,oBAAoB,CAAC,SAAS,iBAAU,GAAG,GAAG,CAS7D;AAED,wBAAgB,YAAY,CAAC,MAAM,EAAE,kBAAkB,GAAG,GAAG,CAY5D"}
|
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* OpenAI SDK wrapper - Drop-in replacement
|
|
4
|
+
*
|
|
5
|
+
* This wrapper allows you to use the official OpenAI SDK with LockLLM protection
|
|
6
|
+
* by simply changing how you initialize the client.
|
|
7
|
+
*
|
|
8
|
+
* @example
|
|
9
|
+
* ```typescript
|
|
10
|
+
* // Replace this:
|
|
11
|
+
* // import OpenAI from 'openai';
|
|
12
|
+
* // const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
|
|
13
|
+
*
|
|
14
|
+
* // With this:
|
|
15
|
+
* import { createOpenAI } from '@lockllm/sdk/wrappers';
|
|
16
|
+
* const openai = createOpenAI({
|
|
17
|
+
* apiKey: process.env.LOCKLLM_API_KEY
|
|
18
|
+
* });
|
|
19
|
+
*
|
|
20
|
+
* // Everything else stays the same!
|
|
21
|
+
* const response = await openai.chat.completions.create({
|
|
22
|
+
* model: "gpt-4",
|
|
23
|
+
* messages: [{ role: "user", content: "Hello!" }]
|
|
24
|
+
* });
|
|
25
|
+
* ```
|
|
26
|
+
*/
|
|
27
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
28
|
+
exports.getOpenAIConstructor = getOpenAIConstructor;
|
|
29
|
+
exports.createOpenAI = createOpenAI;
|
|
30
|
+
/**
|
|
31
|
+
* Create an OpenAI client that routes through LockLLM proxy
|
|
32
|
+
*
|
|
33
|
+
* All requests are automatically scanned for prompt injection before being
|
|
34
|
+
* forwarded to OpenAI. Your OpenAI API key should be configured in the
|
|
35
|
+
* LockLLM dashboard at https://www.lockllm.com/dashboard
|
|
36
|
+
*
|
|
37
|
+
* @param config - Configuration options
|
|
38
|
+
* @returns OpenAI client instance
|
|
39
|
+
*
|
|
40
|
+
* @example
|
|
41
|
+
* ```typescript
|
|
42
|
+
* const openai = createOpenAI({
|
|
43
|
+
* apiKey: process.env.LOCKLLM_API_KEY
|
|
44
|
+
* });
|
|
45
|
+
*
|
|
46
|
+
* const response = await openai.chat.completions.create({
|
|
47
|
+
* model: "gpt-4",
|
|
48
|
+
* messages: [{ role: "user", content: "Hello!" }]
|
|
49
|
+
* });
|
|
50
|
+
* ```
|
|
51
|
+
*/
|
|
52
|
+
/**
 * Lazy-load OpenAI SDK constructor
 *
 * Resolution order handles every packaging shape the SDK has shipped with:
 * ESM default export, named `OpenAI` export, or the module object itself.
 *
 * @internal - exposed for testing purposes
 * @param requireFn - Module loader, injectable for tests (defaults to `require`)
 * @returns The OpenAI client constructor
 * @throws {Error} If the `openai` package cannot be loaded
 */
function getOpenAIConstructor(requireFn = require) {
    try {
        const openaiModule = requireFn('openai');
        return openaiModule.default || openaiModule.OpenAI || openaiModule;
    }
    catch (error) {
        // Preserve the underlying load failure instead of swallowing it.
        throw new Error('OpenAI SDK not found. Please install it with: npm install openai', { cause: error });
    }
}
|
|
65
|
+
/**
 * Create an OpenAI client that routes through the LockLLM proxy.
 *
 * @param config - `apiKey`, optional `baseURL`, plus any other OpenAI client
 *   options, which are forwarded untouched
 * @returns OpenAI client instance
 */
function createOpenAI(config) {
    // Resolve the OpenAI SDK constructor first (throws if not installed).
    const OpenAIConstructor = getOpenAIConstructor();
    const { apiKey, baseURL, ...forwarded } = config;
    const clientOptions = {
        apiKey,
        // Default to the hosted LockLLM proxy endpoint for OpenAI.
        baseURL: baseURL || 'https://api.lockllm.com/v1/proxy/openai',
        ...forwarded,
    };
    return new OpenAIConstructor(clientOptions);
}
|
|
76
|
+
//# sourceMappingURL=openai-wrapper.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"openai-wrapper.js","sourceRoot":"","sources":["../../src/wrappers/openai-wrapper.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;GAwBG;AAqBH;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH;;;GAGG;AACH,MAAM,UAAU,oBAAoB,CAAC,SAAS,GAAG,OAAO;IACtD,IAAI,CAAC;QACH,MAAM,YAAY,GAAG,SAAS,CAAC,QAAQ,CAAC,CAAC;QACzC,OAAO,YAAY,CAAC,OAAO,IAAI,YAAY,CAAC,MAAM,IAAI,YAAY,CAAC;IACrE,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,IAAI,KAAK,CACb,kEAAkE,CACnE,CAAC;IACJ,CAAC;AACH,CAAC;AAED,MAAM,UAAU,YAAY,CAAC,MAA0B;IACrD,6BAA6B;IAC7B,MAAM,iBAAiB,GAAG,oBAAoB,EAAE,CAAC;IAEjD,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,GAAG,YAAY,EAAC,GAAG,MAAM,CAAC;IAEnD,0CAA0C;IAC1C,OAAO,IAAI,iBAAiB,CAAC;QAC3B,MAAM;QACN,OAAO,EAAE,OAAO,IAAI,yCAAyC;QAC7D,GAAG,YAAY;KAChB,CAAC,CAAC;AACL,CAAC"}
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* OpenAI SDK wrapper - Drop-in replacement
|
|
3
|
+
*
|
|
4
|
+
* This wrapper allows you to use the official OpenAI SDK with LockLLM protection
|
|
5
|
+
* by simply changing how you initialize the client.
|
|
6
|
+
*
|
|
7
|
+
* @example
|
|
8
|
+
* ```typescript
|
|
9
|
+
* // Replace this:
|
|
10
|
+
* // import OpenAI from 'openai';
|
|
11
|
+
* // const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
|
|
12
|
+
*
|
|
13
|
+
* // With this:
|
|
14
|
+
* import { createOpenAI } from '@lockllm/sdk/wrappers';
|
|
15
|
+
* const openai = createOpenAI({
|
|
16
|
+
* apiKey: process.env.LOCKLLM_API_KEY
|
|
17
|
+
* });
|
|
18
|
+
*
|
|
19
|
+
* // Everything else stays the same!
|
|
20
|
+
* const response = await openai.chat.completions.create({
|
|
21
|
+
* model: "gpt-4",
|
|
22
|
+
* messages: [{ role: "user", content: "Hello!" }]
|
|
23
|
+
* });
|
|
24
|
+
* ```
|
|
25
|
+
*/
|
|
26
|
+
/**
|
|
27
|
+
* Create an OpenAI client that routes through LockLLM proxy
|
|
28
|
+
*
|
|
29
|
+
* All requests are automatically scanned for prompt injection before being
|
|
30
|
+
* forwarded to OpenAI. Your OpenAI API key should be configured in the
|
|
31
|
+
* LockLLM dashboard at https://www.lockllm.com/dashboard
|
|
32
|
+
*
|
|
33
|
+
* @param config - Configuration options
|
|
34
|
+
* @returns OpenAI client instance
|
|
35
|
+
*
|
|
36
|
+
* @example
|
|
37
|
+
* ```typescript
|
|
38
|
+
* const openai = createOpenAI({
|
|
39
|
+
* apiKey: process.env.LOCKLLM_API_KEY
|
|
40
|
+
* });
|
|
41
|
+
*
|
|
42
|
+
* const response = await openai.chat.completions.create({
|
|
43
|
+
* model: "gpt-4",
|
|
44
|
+
* messages: [{ role: "user", content: "Hello!" }]
|
|
45
|
+
* });
|
|
46
|
+
* ```
|
|
47
|
+
*/
|
|
48
|
+
/**
 * Lazy-load OpenAI SDK constructor
 *
 * Resolution order handles every packaging shape the SDK has shipped with:
 * ESM default export, named `OpenAI` export, or the module object itself.
 *
 * NOTE(review): the default `require` only exists in the CommonJS build; in
 * this ESM (.mjs) build, calling this without an explicit `requireFn` throws
 * a ReferenceError — confirm ESM callers always inject a loader.
 *
 * @internal - exposed for testing purposes
 * @param requireFn - Module loader, injectable for tests (defaults to `require`)
 * @returns The OpenAI client constructor
 * @throws {Error} If the `openai` package cannot be loaded
 */
export function getOpenAIConstructor(requireFn = require) {
    try {
        const openaiModule = requireFn('openai');
        return openaiModule.default || openaiModule.OpenAI || openaiModule;
    }
    catch (error) {
        // Preserve the underlying load failure instead of swallowing it.
        throw new Error('OpenAI SDK not found. Please install it with: npm install openai', { cause: error });
    }
}
|
|
61
|
+
/**
 * Create an OpenAI client that routes through the LockLLM proxy.
 *
 * @param config - `apiKey`, optional `baseURL`, plus any other OpenAI client
 *   options, which are forwarded untouched
 * @returns OpenAI client instance
 */
export function createOpenAI(config) {
    // Resolve the OpenAI SDK constructor first (throws if not installed).
    const OpenAIConstructor = getOpenAIConstructor();
    const { apiKey, baseURL, ...forwarded } = config;
    const clientOptions = {
        apiKey,
        // Default to the hosted LockLLM proxy endpoint for OpenAI.
        baseURL: baseURL || 'https://api.lockllm.com/v1/proxy/openai',
        ...forwarded,
    };
    return new OpenAIConstructor(clientOptions);
}
|
|
72
|
+
//# sourceMappingURL=openai-wrapper.js.map
|
package/package.json
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@lockllm/sdk",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"description": "Enterprise-grade AI security SDK providing real-time protection against prompt injection, jailbreaks, and adversarial attacks. Drop-in replacement for OpenAI, Anthropic, and 15+ providers with zero code changes. Includes REST API, proxy mode, browser extension, and webhook support. Free BYOK model with unlimited scanning.",
|
|
5
|
+
"main": "./dist/index.js",
|
|
6
|
+
"module": "./dist/index.mjs",
|
|
7
|
+
"types": "./dist/index.d.ts",
|
|
8
|
+
"exports": {
|
|
9
|
+
".": {
|
|
10
|
+
"import": "./dist/index.mjs",
|
|
11
|
+
"require": "./dist/index.js",
|
|
12
|
+
"types": "./dist/index.d.ts"
|
|
13
|
+
},
|
|
14
|
+
"./wrappers": {
|
|
15
|
+
"import": "./dist/wrappers/index.mjs",
|
|
16
|
+
"require": "./dist/wrappers/index.js",
|
|
17
|
+
"types": "./dist/wrappers/index.d.ts"
|
|
18
|
+
}
|
|
19
|
+
},
|
|
20
|
+
"files": [
|
|
21
|
+
"dist",
|
|
22
|
+
"README.md",
|
|
23
|
+
"LICENSE",
|
|
24
|
+
"CONTRIBUTING.md",
|
|
25
|
+
"SECURITY.md",
|
|
26
|
+
"CODE_OF_CONDUCT.md",
|
|
27
|
+
"CHANGELOG.md"
|
|
28
|
+
],
|
|
29
|
+
"scripts": {
|
|
30
|
+
"build": "npm run build:cjs && npm run build:esm && npm run build:fix-extensions",
|
|
31
|
+
"build:cjs": "tsc",
|
|
32
|
+
"build:esm": "tsc -p tsconfig.esm.json",
|
|
33
|
+
"build:fix-extensions": "node -e \"const fs=require('fs');const path=require('path');function rename(dir){fs.readdirSync(dir,{withFileTypes:true}).forEach(f=>{const p=path.join(dir,f.name);if(f.isDirectory())rename(p);else if(f.name.endsWith('.js')){fs.renameSync(p,p.replace(/\\.js$/,'.mjs'));}})}rename('dist-esm');fs.cpSync('dist-esm',path.join('dist'),{recursive:true});fs.rmSync('dist-esm',{recursive:true,force:true});\"",
|
|
34
|
+
"test": "vitest run",
|
|
35
|
+
"test:watch": "vitest",
|
|
36
|
+
"test:coverage": "vitest run --coverage",
|
|
37
|
+
"lint": "eslint src/**/*.ts",
|
|
38
|
+
"format": "prettier --write \"src/**/*.ts\"",
|
|
39
|
+
"typecheck": "tsc --noEmit",
|
|
40
|
+
"prepublishOnly": "npm run typecheck && npm run test && npm run build"
|
|
41
|
+
},
|
|
42
|
+
"keywords": [
|
|
43
|
+
"openai",
|
|
44
|
+
"anthropic",
|
|
45
|
+
"chatgpt",
|
|
46
|
+
"claude",
|
|
47
|
+
"llm",
|
|
48
|
+
"ai",
|
|
49
|
+
"proxy",
|
|
50
|
+
"security",
|
|
51
|
+
"prompt-injection",
|
|
52
|
+
"jailbreak",
|
|
53
|
+
"openai-sdk",
|
|
54
|
+
"anthropic-sdk",
|
|
55
|
+
"ai-security",
|
|
56
|
+
"llm-security",
|
|
57
|
+
"prompt-security",
|
|
58
|
+
"openai-proxy",
|
|
59
|
+
"chatgpt-api",
|
|
60
|
+
"gpt-5",
|
|
61
|
+
"claude-api",
|
|
62
|
+
"langchain",
|
|
63
|
+
"ai-gateway",
|
|
64
|
+
"llm-firewall",
|
|
65
|
+
"rag",
|
|
66
|
+
"agents",
|
|
67
|
+
"typescript",
|
|
68
|
+
"production"
|
|
69
|
+
],
|
|
70
|
+
"author": "LockLLM",
|
|
71
|
+
"license": "MIT",
|
|
72
|
+
"repository": {
|
|
73
|
+
"type": "git",
|
|
74
|
+
"url": "https://github.com/lockllm/lockllm-npm"
|
|
75
|
+
},
|
|
76
|
+
"homepage": "https://lockllm.com",
|
|
77
|
+
"peerDependencies": {
|
|
78
|
+
"@anthropic-ai/sdk": ">=0.20.0 <1.0.0",
"cohere-ai": "^7.0.0",
"openai": ">=4.0.0 <7.0.0"
|
|
81
|
+
},
|
|
82
|
+
"peerDependenciesMeta": {
|
|
83
|
+
"openai": {
|
|
84
|
+
"optional": true
|
|
85
|
+
},
|
|
86
|
+
"@anthropic-ai/sdk": {
|
|
87
|
+
"optional": true
|
|
88
|
+
},
|
|
89
|
+
"cohere-ai": {
|
|
90
|
+
"optional": true
|
|
91
|
+
}
|
|
92
|
+
},
|
|
93
|
+
"devDependencies": {
|
|
94
|
+
"@anthropic-ai/sdk": "^0.71.2",
|
|
95
|
+
"@types/node": "^20.19.30",
|
|
96
|
+
"@typescript-eslint/eslint-plugin": "^6.0.0",
|
|
97
|
+
"@typescript-eslint/parser": "^6.0.0",
|
|
98
|
+
"@vitest/coverage-v8": "^1.0.0",
|
|
99
|
+
"cohere-ai": "^7.20.0",
|
|
100
|
+
"eslint": "^8.0.0",
|
|
101
|
+
"openai": "^6.16.0",
|
|
102
|
+
"prettier": "^3.0.0",
|
|
103
|
+
"typescript": "^5.9.3",
|
|
104
|
+
"vitest": "^1.0.0"
|
|
105
|
+
}
|
|
106
|
+
}
|