@openguardrails/gateway 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. package/dist/config.d.ts +13 -0
  2. package/dist/config.d.ts.map +1 -0
  3. package/dist/config.js +100 -0
  4. package/dist/config.js.map +1 -0
  5. package/dist/handlers/anthropic.d.ts +12 -0
  6. package/dist/handlers/anthropic.d.ts.map +1 -0
  7. package/dist/handlers/anthropic.js +150 -0
  8. package/dist/handlers/anthropic.js.map +1 -0
  9. package/dist/handlers/gemini.d.ts +12 -0
  10. package/dist/handlers/gemini.d.ts.map +1 -0
  11. package/dist/handlers/gemini.js +80 -0
  12. package/dist/handlers/gemini.js.map +1 -0
  13. package/dist/handlers/openai.d.ts +13 -0
  14. package/dist/handlers/openai.d.ts.map +1 -0
  15. package/dist/handlers/openai.js +145 -0
  16. package/dist/handlers/openai.js.map +1 -0
  17. package/dist/index.d.ts +16 -0
  18. package/dist/index.d.ts.map +1 -0
  19. package/dist/index.js +136 -0
  20. package/dist/index.js.map +1 -0
  21. package/dist/restorer.d.ts +21 -0
  22. package/dist/restorer.d.ts.map +1 -0
  23. package/dist/restorer.js +91 -0
  24. package/dist/restorer.js.map +1 -0
  25. package/dist/sanitizer.d.ts +17 -0
  26. package/dist/sanitizer.d.ts.map +1 -0
  27. package/dist/sanitizer.js +226 -0
  28. package/dist/sanitizer.js.map +1 -0
  29. package/dist/types.d.ts +35 -0
  30. package/dist/types.d.ts.map +1 -0
  31. package/dist/types.js +5 -0
  32. package/dist/types.js.map +1 -0
  33. package/package.json +55 -0
  34. package/src/config.ts +122 -0
  35. package/src/handlers/anthropic.ts +195 -0
  36. package/src/handlers/gemini.ts +99 -0
  37. package/src/handlers/openai.ts +188 -0
  38. package/src/index.ts +159 -0
  39. package/src/restorer.ts +101 -0
  40. package/src/sanitizer.ts +278 -0
  41. package/src/types.ts +43 -0
package/src/config.ts ADDED
@@ -0,0 +1,122 @@
1
+ /**
2
+ * Gateway configuration management
3
+ */
4
+
5
+ import { readFileSync, existsSync } from "node:fs";
6
+ import { homedir } from "node:os";
7
+ import { join } from "node:path";
8
+ import type { GatewayConfig } from "./types.js";
9
+
10
+ const DEFAULT_CONFIG_PATH = join(homedir(), ".openguardrails", "gateway.json");
11
+
12
+ /**
13
+ * Load gateway configuration from file or environment
14
+ */
15
+ export function loadConfig(configPath?: string): GatewayConfig {
16
+ const path = configPath || DEFAULT_CONFIG_PATH;
17
+
18
+ // Default configuration
19
+ const defaultConfig: GatewayConfig = {
20
+ port: parseInt(process.env.GATEWAY_PORT || "8900", 10),
21
+ backends: {},
22
+ };
23
+
24
+ // Try to load from file
25
+ if (existsSync(path)) {
26
+ try {
27
+ const fileContent = readFileSync(path, "utf-8");
28
+ const fileConfig = JSON.parse(fileContent);
29
+ return mergeConfig(defaultConfig, fileConfig);
30
+ } catch (error) {
31
+ console.warn(
32
+ `[ai-security-gateway] Failed to load config from ${path}:`,
33
+ error,
34
+ );
35
+ }
36
+ }
37
+
38
+ // Load from environment variables
39
+ return loadFromEnv(defaultConfig);
40
+ }
41
+
42
+ /**
43
+ * Load backend configs from environment variables
44
+ */
45
+ function loadFromEnv(config: GatewayConfig): GatewayConfig {
46
+ // Anthropic
47
+ if (process.env.ANTHROPIC_API_KEY) {
48
+ config.backends.anthropic = {
49
+ baseUrl: process.env.ANTHROPIC_BASE_URL || "https://api.anthropic.com",
50
+ apiKey: process.env.ANTHROPIC_API_KEY,
51
+ };
52
+ }
53
+
54
+ // OpenAI
55
+ if (process.env.OPENAI_API_KEY) {
56
+ config.backends.openai = {
57
+ baseUrl: process.env.OPENAI_BASE_URL || "https://api.openai.com",
58
+ apiKey: process.env.OPENAI_API_KEY,
59
+ };
60
+ }
61
+
62
+ // Kimi (Moonshot)
63
+ if (process.env.KIMI_API_KEY || process.env.MOONSHOT_API_KEY) {
64
+ config.backends.openai = {
65
+ baseUrl:
66
+ process.env.KIMI_BASE_URL || "https://api.moonshot.cn",
67
+ apiKey: process.env.KIMI_API_KEY || process.env.MOONSHOT_API_KEY || "",
68
+ };
69
+ }
70
+
71
+ // Gemini
72
+ if (process.env.GEMINI_API_KEY || process.env.GOOGLE_API_KEY) {
73
+ config.backends.gemini = {
74
+ baseUrl:
75
+ process.env.GEMINI_BASE_URL ||
76
+ "https://generativelanguage.googleapis.com",
77
+ apiKey: process.env.GEMINI_API_KEY || process.env.GOOGLE_API_KEY || "",
78
+ };
79
+ }
80
+
81
+ return config;
82
+ }
83
+
84
+ /**
85
+ * Merge file config with default config
86
+ */
87
+ function mergeConfig(
88
+ defaultConfig: GatewayConfig,
89
+ fileConfig: Partial<GatewayConfig>,
90
+ ): GatewayConfig {
91
+ return {
92
+ port: fileConfig.port ?? defaultConfig.port,
93
+ backends: {
94
+ ...defaultConfig.backends,
95
+ ...fileConfig.backends,
96
+ },
97
+ routing: fileConfig.routing,
98
+ };
99
+ }
100
+
101
+ /**
102
+ * Validate configuration
103
+ */
104
+ export function validateConfig(config: GatewayConfig): void {
105
+ if (config.port < 1 || config.port > 65535) {
106
+ throw new Error(`Invalid port: ${config.port}`);
107
+ }
108
+
109
+ // Note: Backends are now optional. Gateway will act as transparent proxy.
110
+ // If no backends configured, gateway will forward requests based on routing rules
111
+ // or pass through to the original target.
112
+
113
+ // Validate each backend (if any)
114
+ for (const [name, backend] of Object.entries(config.backends)) {
115
+ if (!backend.baseUrl) {
116
+ throw new Error(`Backend ${name} missing baseUrl`);
117
+ }
118
+ if (!backend.apiKey) {
119
+ throw new Error(`Backend ${name} missing apiKey`);
120
+ }
121
+ }
122
+ }
@@ -0,0 +1,195 @@
1
+ /**
2
+ * AI Security Gateway - Anthropic Messages API handler
3
+ *
4
+ * Handles POST /v1/messages requests in Anthropic's native format.
5
+ */
6
+
7
+ import type { IncomingMessage, ServerResponse } from "node:http";
8
+ import type { GatewayConfig, MappingTable } from "../types.js";
9
+ import { sanitize } from "../sanitizer.js";
10
+ import { restore, restoreSSELine } from "../restorer.js";
11
+
12
+ /**
13
+ * Handle Anthropic API request
14
+ */
15
+ export async function handleAnthropicRequest(
16
+ req: IncomingMessage,
17
+ res: ServerResponse,
18
+ config: GatewayConfig,
19
+ ): Promise<void> {
20
+ try {
21
+ // 1. Parse request body
22
+ const body = await readBody(req);
23
+ const requestData = JSON.parse(body);
24
+
25
+ const {
26
+ model,
27
+ messages,
28
+ system,
29
+ tools,
30
+ max_tokens,
31
+ temperature,
32
+ stream = false,
33
+ ...rest
34
+ } = requestData;
35
+
36
+ // 2. Sanitize messages
37
+ const { sanitized: sanitizedMessages, mappingTable } = sanitize(messages);
38
+
39
+ // 3. Sanitize system prompt if present
40
+ const sanitizedSystem = system
41
+ ? sanitize(system).sanitized
42
+ : system;
43
+
44
+ // Note: We reuse the same mapping table so placeholders are consistent
45
+
46
+ // 4. Build sanitized request
47
+ const sanitizedRequest = {
48
+ model,
49
+ messages: sanitizedMessages,
50
+ ...(system && { system: sanitizedSystem }),
51
+ ...(tools && { tools }),
52
+ max_tokens,
53
+ ...(temperature !== undefined && { temperature }),
54
+ stream,
55
+ ...rest,
56
+ };
57
+
58
+ // 5. Get backend config
59
+ const backend = config.backends.anthropic;
60
+ if (!backend) {
61
+ res.writeHead(500, { "Content-Type": "application/json" });
62
+ res.end(JSON.stringify({ error: "Anthropic backend not configured" }));
63
+ return;
64
+ }
65
+
66
+ // 6. Forward to real Anthropic API
67
+ const apiUrl = `${backend.baseUrl}/v1/messages`;
68
+ const response = await fetch(apiUrl, {
69
+ method: "POST",
70
+ headers: {
71
+ "Content-Type": "application/json",
72
+ "anthropic-version": req.headers["anthropic-version"] as string || "2023-06-01",
73
+ "x-api-key": backend.apiKey,
74
+ },
75
+ body: JSON.stringify(sanitizedRequest),
76
+ });
77
+
78
+ if (!response.ok) {
79
+ // Forward error response
80
+ res.writeHead(response.status, { "Content-Type": "application/json" });
81
+ const errorBody = await response.text();
82
+ res.end(errorBody);
83
+ return;
84
+ }
85
+
86
+ // 7. Handle streaming response
87
+ if (stream) {
88
+ await handleAnthropicStream(response, res, mappingTable);
89
+ } else {
90
+ await handleAnthropicNonStream(response, res, mappingTable);
91
+ }
92
+ } catch (error) {
93
+ console.error("[ai-security-gateway] Anthropic handler error:", error);
94
+ res.writeHead(500, { "Content-Type": "application/json" });
95
+ res.end(
96
+ JSON.stringify({
97
+ error: "Internal gateway error",
98
+ message: error instanceof Error ? error.message : String(error),
99
+ }),
100
+ );
101
+ }
102
+ }
103
+
104
+ /**
105
+ * Handle streaming response
106
+ */
107
+ async function handleAnthropicStream(
108
+ response: Response,
109
+ res: ServerResponse,
110
+ mappingTable: MappingTable,
111
+ ): Promise<void> {
112
+ // Set SSE headers
113
+ res.writeHead(200, {
114
+ "Content-Type": "text/event-stream",
115
+ "Cache-Control": "no-cache",
116
+ "Connection": "keep-alive",
117
+ });
118
+
119
+ const reader = response.body?.getReader();
120
+ if (!reader) {
121
+ res.end();
122
+ return;
123
+ }
124
+
125
+ const decoder = new TextDecoder();
126
+ let buffer = "";
127
+
128
+ try {
129
+ while (true) {
130
+ const { done, value } = await reader.read();
131
+ if (done) break;
132
+
133
+ // Decode chunk
134
+ buffer += decoder.decode(value, { stream: true });
135
+
136
+ // Process complete lines
137
+ const lines = buffer.split("\n");
138
+ buffer = lines.pop() || ""; // Keep incomplete line in buffer
139
+
140
+ for (const line of lines) {
141
+ if (!line.trim()) {
142
+ res.write("\n");
143
+ continue;
144
+ }
145
+
146
+ // Restore placeholders in SSE line
147
+ const restoredLine = restoreSSELine(line, mappingTable);
148
+ res.write(restoredLine + "\n");
149
+ }
150
+ }
151
+
152
+ // Write any remaining buffer
153
+ if (buffer.trim()) {
154
+ const restoredLine = restoreSSELine(buffer, mappingTable);
155
+ res.write(restoredLine + "\n");
156
+ }
157
+
158
+ res.end();
159
+ } catch (error) {
160
+ console.error("[ai-security-gateway] Stream error:", error);
161
+ res.end();
162
+ }
163
+ }
164
+
165
+ /**
166
+ * Handle non-streaming response
167
+ */
168
+ async function handleAnthropicNonStream(
169
+ response: Response,
170
+ res: ServerResponse,
171
+ mappingTable: MappingTable,
172
+ ): Promise<void> {
173
+ const responseBody = await response.text();
174
+ const responseData = JSON.parse(responseBody);
175
+
176
+ // Restore placeholders in response
177
+ const restoredData = restore(responseData, mappingTable);
178
+
179
+ res.writeHead(200, { "Content-Type": "application/json" });
180
+ res.end(JSON.stringify(restoredData));
181
+ }
182
+
183
+ /**
184
+ * Read request body as string
185
+ */
186
+ function readBody(req: IncomingMessage): Promise<string> {
187
+ return new Promise((resolve, reject) => {
188
+ let body = "";
189
+ req.on("data", (chunk) => {
190
+ body += chunk.toString();
191
+ });
192
+ req.on("end", () => resolve(body));
193
+ req.on("error", reject);
194
+ });
195
+ }
@@ -0,0 +1,99 @@
1
+ /**
2
+ * AI Security Gateway - Google Gemini API handler
3
+ *
4
+ * Handles POST /v1/models/:model:generateContent requests in Gemini's format.
5
+ */
6
+
7
+ import type { IncomingMessage, ServerResponse } from "node:http";
8
+ import type { GatewayConfig, MappingTable } from "../types.js";
9
+ import { sanitize } from "../sanitizer.js";
10
+ import { restore } from "../restorer.js";
11
+
12
+ /**
13
+ * Handle Gemini API request
14
+ */
15
+ export async function handleGeminiRequest(
16
+ req: IncomingMessage,
17
+ res: ServerResponse,
18
+ config: GatewayConfig,
19
+ modelName: string,
20
+ ): Promise<void> {
21
+ try {
22
+ // 1. Parse request body
23
+ const body = await readBody(req);
24
+ const requestData = JSON.parse(body);
25
+
26
+ const { contents, tools, generationConfig, ...rest } = requestData;
27
+
28
+ // 2. Sanitize contents (Gemini uses "contents" instead of "messages")
29
+ const { sanitized: sanitizedContents, mappingTable } = sanitize(contents);
30
+
31
+ // 3. Build sanitized request
32
+ const sanitizedRequest = {
33
+ contents: sanitizedContents,
34
+ ...(tools && { tools }),
35
+ ...(generationConfig && { generationConfig }),
36
+ ...rest,
37
+ };
38
+
39
+ // 4. Get backend config
40
+ const backend = config.backends.gemini;
41
+ if (!backend) {
42
+ res.writeHead(500, { "Content-Type": "application/json" });
43
+ res.end(JSON.stringify({ error: "Gemini backend not configured" }));
44
+ return;
45
+ }
46
+
47
+ // 5. Forward to Gemini API
48
+ const apiUrl = `${backend.baseUrl}/v1/models/${modelName}:generateContent`;
49
+ const response = await fetch(apiUrl, {
50
+ method: "POST",
51
+ headers: {
52
+ "Content-Type": "application/json",
53
+ "x-goog-api-key": backend.apiKey,
54
+ },
55
+ body: JSON.stringify(sanitizedRequest),
56
+ });
57
+
58
+ if (!response.ok) {
59
+ // Forward error response
60
+ res.writeHead(response.status, { "Content-Type": "application/json" });
61
+ const errorBody = await response.text();
62
+ res.end(errorBody);
63
+ return;
64
+ }
65
+
66
+ // 6. Handle response (Gemini typically doesn't stream in same way)
67
+ const responseBody = await response.text();
68
+ const responseData = JSON.parse(responseBody);
69
+
70
+ // Restore placeholders in response
71
+ const restoredData = restore(responseData, mappingTable);
72
+
73
+ res.writeHead(200, { "Content-Type": "application/json" });
74
+ res.end(JSON.stringify(restoredData));
75
+ } catch (error) {
76
+ console.error("[ai-security-gateway] Gemini handler error:", error);
77
+ res.writeHead(500, { "Content-Type": "application/json" });
78
+ res.end(
79
+ JSON.stringify({
80
+ error: "Internal gateway error",
81
+ message: error instanceof Error ? error.message : String(error),
82
+ }),
83
+ );
84
+ }
85
+ }
86
+
87
+ /**
88
+ * Read request body as string
89
+ */
90
+ function readBody(req: IncomingMessage): Promise<string> {
91
+ return new Promise((resolve, reject) => {
92
+ let body = "";
93
+ req.on("data", (chunk) => {
94
+ body += chunk.toString();
95
+ });
96
+ req.on("end", () => resolve(body));
97
+ req.on("error", reject);
98
+ });
99
+ }
@@ -0,0 +1,188 @@
1
+ /**
2
+ * AI Security Gateway - OpenAI Chat Completions API handler
3
+ *
4
+ * Handles POST /v1/chat/completions requests in OpenAI's format.
5
+ * Also compatible with OpenAI-compatible APIs (Kimi, DeepSeek, etc.)
6
+ */
7
+
8
+ import type { IncomingMessage, ServerResponse } from "node:http";
9
+ import type { GatewayConfig, MappingTable } from "../types.js";
10
+ import { sanitize } from "../sanitizer.js";
11
+ import { restore, restoreSSELine } from "../restorer.js";
12
+
13
+ /**
14
+ * Handle OpenAI API request
15
+ */
16
+ export async function handleOpenAIRequest(
17
+ req: IncomingMessage,
18
+ res: ServerResponse,
19
+ config: GatewayConfig,
20
+ ): Promise<void> {
21
+ try {
22
+ // 1. Parse request body
23
+ const body = await readBody(req);
24
+ const requestData = JSON.parse(body);
25
+
26
+ const {
27
+ model,
28
+ messages,
29
+ tools,
30
+ tool_choice,
31
+ temperature,
32
+ max_tokens,
33
+ stream = false,
34
+ ...rest
35
+ } = requestData;
36
+
37
+ // 2. Sanitize messages
38
+ const { sanitized: sanitizedMessages, mappingTable } = sanitize(messages);
39
+
40
+ // 3. Build sanitized request
41
+ const sanitizedRequest = {
42
+ model,
43
+ messages: sanitizedMessages,
44
+ ...(tools && { tools }),
45
+ ...(tool_choice && { tool_choice }),
46
+ ...(temperature !== undefined && { temperature }),
47
+ ...(max_tokens && { max_tokens }),
48
+ stream,
49
+ ...rest,
50
+ };
51
+
52
+ // 4. Get backend config
53
+ const backend = config.backends.openai;
54
+ if (!backend) {
55
+ res.writeHead(500, { "Content-Type": "application/json" });
56
+ res.end(JSON.stringify({ error: "OpenAI backend not configured" }));
57
+ return;
58
+ }
59
+
60
+ // 5. Forward to OpenAI (or compatible) API
61
+ const apiUrl = `${backend.baseUrl}/v1/chat/completions`;
62
+ const response = await fetch(apiUrl, {
63
+ method: "POST",
64
+ headers: {
65
+ "Content-Type": "application/json",
66
+ "Authorization": `Bearer ${backend.apiKey}`,
67
+ },
68
+ body: JSON.stringify(sanitizedRequest),
69
+ });
70
+
71
+ if (!response.ok) {
72
+ // Forward error response
73
+ res.writeHead(response.status, { "Content-Type": "application/json" });
74
+ const errorBody = await response.text();
75
+ res.end(errorBody);
76
+ return;
77
+ }
78
+
79
+ // 6. Handle streaming or non-streaming response
80
+ if (stream) {
81
+ await handleOpenAIStream(response, res, mappingTable);
82
+ } else {
83
+ await handleOpenAINonStream(response, res, mappingTable);
84
+ }
85
+ } catch (error) {
86
+ console.error("[ai-security-gateway] OpenAI handler error:", error);
87
+ res.writeHead(500, { "Content-Type": "application/json" });
88
+ res.end(
89
+ JSON.stringify({
90
+ error: "Internal gateway error",
91
+ message: error instanceof Error ? error.message : String(error),
92
+ }),
93
+ );
94
+ }
95
+ }
96
+
97
+ /**
98
+ * Handle streaming response (SSE)
99
+ */
100
+ async function handleOpenAIStream(
101
+ response: Response,
102
+ res: ServerResponse,
103
+ mappingTable: MappingTable,
104
+ ): Promise<void> {
105
+ // Set SSE headers
106
+ res.writeHead(200, {
107
+ "Content-Type": "text/event-stream",
108
+ "Cache-Control": "no-cache",
109
+ "Connection": "keep-alive",
110
+ });
111
+
112
+ const reader = response.body?.getReader();
113
+ if (!reader) {
114
+ res.end();
115
+ return;
116
+ }
117
+
118
+ const decoder = new TextDecoder();
119
+ let buffer = "";
120
+
121
+ try {
122
+ while (true) {
123
+ const { done, value } = await reader.read();
124
+ if (done) break;
125
+
126
+ // Decode chunk
127
+ buffer += decoder.decode(value, { stream: true });
128
+
129
+ // Process complete lines
130
+ const lines = buffer.split("\n");
131
+ buffer = lines.pop() || ""; // Keep incomplete line in buffer
132
+
133
+ for (const line of lines) {
134
+ if (!line.trim()) {
135
+ res.write("\n");
136
+ continue;
137
+ }
138
+
139
+ // Restore placeholders in SSE line
140
+ const restoredLine = restoreSSELine(line, mappingTable);
141
+ res.write(restoredLine + "\n");
142
+ }
143
+ }
144
+
145
+ // Write any remaining buffer
146
+ if (buffer.trim()) {
147
+ const restoredLine = restoreSSELine(buffer, mappingTable);
148
+ res.write(restoredLine + "\n");
149
+ }
150
+
151
+ res.end();
152
+ } catch (error) {
153
+ console.error("[ai-security-gateway] Stream error:", error);
154
+ res.end();
155
+ }
156
+ }
157
+
158
+ /**
159
+ * Handle non-streaming response
160
+ */
161
+ async function handleOpenAINonStream(
162
+ response: Response,
163
+ res: ServerResponse,
164
+ mappingTable: MappingTable,
165
+ ): Promise<void> {
166
+ const responseBody = await response.text();
167
+ const responseData = JSON.parse(responseBody);
168
+
169
+ // Restore placeholders in response
170
+ const restoredData = restore(responseData, mappingTable);
171
+
172
+ res.writeHead(200, { "Content-Type": "application/json" });
173
+ res.end(JSON.stringify(restoredData));
174
+ }
175
+
176
+ /**
177
+ * Read request body as string
178
+ */
179
+ function readBody(req: IncomingMessage): Promise<string> {
180
+ return new Promise((resolve, reject) => {
181
+ let body = "";
182
+ req.on("data", (chunk) => {
183
+ body += chunk.toString();
184
+ });
185
+ req.on("end", () => resolve(body));
186
+ req.on("error", reject);
187
+ });
188
+ }