@xyd-js/ask-ai-widget 0.0.0-build-87e0566-20251013171826 → 0.0.0-build-6675456-20251014012658
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/server-standalone.js +145 -9
- package/dist/server.js +217 -75
- package/dist/widget.js +21 -21
- package/package.json +9 -4
- package/server.ts +15 -5
- package/src/__generated__/openapi-spec.ts +238 -0
- package/src/index.ts +29 -6
- package/src/settings.ts +72 -0
- package/src/utils.ts +107 -0
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@xyd-js/ask-ai-widget",
-  "version": "0.0.0-build-
+  "version": "0.0.0-build-6675456-20251014012658",
   "type": "module",
   "main": "dist/server.js",
   "files": [
@@ -17,8 +17,12 @@
     "express": "^4.18.2",
     "react": "^19.0.0",
     "react-dom": "^19.0.0",
-    "@xyd-js/ask-ai": "0.0.0-build-
-    "@xyd-js/
+    "@xyd-js/ask-ai": "0.0.0-build-6675456-20251014012658",
+    "@xyd-js/cli-sdk": "0.1.0-build.0",
+    "@xyd-js/mcp-server": "0.0.0-build-6675456-20251014012658"
+  },
+  "devDependencies": {
+    "openapi-typescript": "^7.9.1"
   },
   "scripts": {
     "build": "bun run build:widget && bun run build:server && bun run build:server-standalone",
@@ -26,6 +30,7 @@
     "build:server": "NODE_ENV=production bun build server.ts --outdir dist --outfile dist/server.js --target node",
     "build:server-standalone": "AS_MCP=false NODE_ENV=production bun build server-standalone.ts --outdir dist --outfile dist --target node",
     "dev": "bun --watch server.ts",
-    "mcp:inspector": "npx @modelcontextprotocol/inspector http://localhost:3500/mcp"
+    "mcp:inspector": "npx @modelcontextprotocol/inspector http://localhost:3500/mcp",
+    "openapi:generate-spec": "npx openapi-typescript ./openapi.yaml -o ./src/__generated__/openapi-spec.ts"
   }
 }
package/server.ts
CHANGED

@@ -1,11 +1,21 @@
 import { MCPServer } from "@xyd-js/mcp-server/mcp";
 
 import { startServer } from "./src/index";
+import { loadSetting } from "./src/utils";
 
-
+loadSetting()
+    .then((settings) => {
+        if (!settings.mcp?.url) {
+            throw new Error("MCP_SOURCE is not set");
+        }
 
-
-
-
+        const openApiSource = settings.mcp?.sources?.openapi || "";
+        if (!openApiSource) {
+            console.warn("Open API source is not set");
+        }
 
-
+        const mcpServer = new MCPServer(openApiSource);
+
+        return startServer(settings, mcpServer);
+    })
+    .catch(console.error);
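The new entrypoint above resolves settings first, constructs the MCP server from the configured OpenAPI source, and only then starts the Express server. Below is a minimal sketch of the same boot sequence written with async/await, in case you want to embed it elsewhere. It is illustrative only and relies on the modules shown in this diff; note that the shipped server.ts reads the OpenAPI source via settings.mcp?.sources?.openapi, while the Settings interface later in this diff declares sources at the top level, so the sketch follows the interface.

// Illustrative sketch only, not part of the published package.
// It reuses loadSetting, MCPServer and startServer exactly as the diff above defines them.
import { MCPServer } from "@xyd-js/mcp-server/mcp";

import { startServer } from "./src/index";
import { loadSetting } from "./src/utils";

async function main(): Promise<void> {
    const settings = await loadSetting();

    if (!settings.mcp?.url) {
        // Same guard as server.ts above.
        throw new Error("MCP_SOURCE is not set");
    }

    // server.ts reads the OpenAPI source from settings.mcp?.sources?.openapi; this
    // sketch follows the Settings interface, which declares `sources` at the top level.
    const openapi = settings.sources?.openapi;
    const openApiSource = Array.isArray(openapi) ? openapi[0] ?? "" : openapi ?? "";
    if (!openApiSource) {
        console.warn("Open API source is not set");
    }

    const mcpServer = new MCPServer(openApiSource);
    await startServer(settings, mcpServer);
}

main().catch(console.error);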
package/src/__generated__/openapi-spec.ts
ADDED

@@ -0,0 +1,238 @@
+/**
+ * This file was auto-generated by openapi-typescript.
+ * Do not make direct changes to the file.
+ */
+
+export interface paths {
+    "/widget.js": {
+        parameters: {
+            query?: never;
+            header?: never;
+            path?: never;
+            cookie?: never;
+        };
+        /**
+         * Get widget JavaScript bundle
+         * @description Serves the compiled widget JavaScript bundle
+         */
+        get: {
+            parameters: {
+                query?: never;
+                header?: never;
+                path: {
+                    petId: number;
+                };
+                cookie?: never;
+            };
+            requestBody?: never;
+            responses: {
+                /** @description Widget JavaScript bundle */
+                200: {
+                    headers: {
+                        [name: string]: unknown;
+                    };
+                    content: {
+                        "application/javascript": string;
+                    };
+                };
+                /** @description Widget not found */
+                404: {
+                    headers: {
+                        [name: string]: unknown;
+                    };
+                    content: {
+                        "text/plain": string;
+                    };
+                };
+            };
+        };
+        put?: never;
+        post?: never;
+        delete?: never;
+        options?: never;
+        head?: never;
+        patch?: never;
+        trace?: never;
+    };
+    "/ask": {
+        parameters: {
+            query?: never;
+            header?: never;
+            path?: never;
+            cookie?: never;
+        };
+        get?: never;
+        put?: never;
+        /**
+         * Ask AI a question
+         * @description Send a question to the AI service and get a streaming response
+         */
+        post: {
+            parameters: {
+                query?: never;
+                header?: never;
+                path?: never;
+                cookie?: never;
+            };
+            requestBody: {
+                content: {
+                    "application/json": {
+                        /**
+                         * @description The question to ask the AI
+                         * @example What is the weather like today?
+                         */
+                        question: string;
+                        /**
+                         * @description Additional context for the AI
+                         * @example I'm working on a React project
+                         */
+                        context?: string;
+                    };
+                };
+            };
+            responses: {
+                /** @description Streaming AI response */
+                200: {
+                    headers: {
+                        [name: string]: unknown;
+                    };
+                    content: {
+                        "text/plain": string;
+                    };
+                };
+                /** @description Internal server error */
+                500: {
+                    headers: {
+                        [name: string]: unknown;
+                    };
+                    content: {
+                        "text/plain": string;
+                    };
+                };
+            };
+        };
+        delete?: never;
+        options?: never;
+        head?: never;
+        patch?: never;
+        trace?: never;
+    };
+    "/mcp": {
+        parameters: {
+            query?: never;
+            header?: never;
+            path?: never;
+            cookie?: never;
+        };
+        /**
+         * Handle MCP session request
+         * @description Handle Model Context Protocol session requests
+         */
+        get: {
+            parameters: {
+                query?: never;
+                header?: never;
+                path?: never;
+                cookie?: never;
+            };
+            requestBody?: never;
+            responses: {
+                /** @description MCP session established */
+                200: {
+                    headers: {
+                        [name: string]: unknown;
+                    };
+                    content?: never;
+                };
+                /** @description MCP server not available */
+                404: {
+                    headers: {
+                        [name: string]: unknown;
+                    };
+                    content?: never;
+                };
+            };
+        };
+        put?: never;
+        /**
+         * Handle MCP connection request
+         * @description Handle Model Context Protocol connection requests
+         */
+        post: {
+            parameters: {
+                query?: never;
+                header?: never;
+                path?: never;
+                cookie?: never;
+            };
+            requestBody?: never;
+            responses: {
+                /** @description MCP connection established */
+                200: {
+                    headers: {
+                        [name: string]: unknown;
+                    };
+                    content?: never;
+                };
+                /** @description MCP server not available */
+                404: {
+                    headers: {
+                        [name: string]: unknown;
+                    };
+                    content?: never;
+                };
+            };
+        };
+        /**
+         * Handle MCP session termination
+         * @description Terminate Model Context Protocol session
+         */
+        delete: {
+            parameters: {
+                query?: never;
+                header?: never;
+                path?: never;
+                cookie?: never;
+            };
+            requestBody?: never;
+            responses: {
+                /** @description MCP session terminated */
+                200: {
+                    headers: {
+                        [name: string]: unknown;
+                    };
+                    content?: never;
+                };
+                /** @description MCP server not available */
+                404: {
+                    headers: {
+                        [name: string]: unknown;
+                    };
+                    content?: never;
+                };
+            };
+        };
+        options?: never;
+        head?: never;
+        patch?: never;
+        trace?: never;
+    };
+}
+export type webhooks = Record<string, never>;
+export interface components {
+    schemas: {
+        Error: {
+            /** @description Error message */
+            message: string;
+            /** @description Error code */
+            code?: string;
+        };
+    };
+    responses: never;
+    parameters: never;
+    requestBodies: never;
+    headers: never;
+    pathItems: never;
+}
+export type $defs = Record<string, never>;
+export type operations = Record<string, never>;
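Because the generated module contains only types, downstream code can derive request and response shapes from paths at compile time with no runtime dependency. A small sketch follows; the type aliases are hypothetical names chosen for illustration and the import path assumes the package root. (The petId path parameter on GET /widget.js looks like a leftover from a template spec.)

// Illustrative only: deriving concrete types from the generated `paths` interface.
import type { paths } from "./src/__generated__/openapi-spec";

// Body accepted by POST /ask: { question: string; context?: string }.
type AskRequestBody =
    paths["/ask"]["post"]["requestBody"]["content"]["application/json"];

// Successful GET /widget.js response body: the bundle as a string.
type WidgetBundle =
    paths["/widget.js"]["get"]["responses"][200]["content"]["application/javascript"];

const exampleAsk: AskRequestBody = {
    question: "What is the weather like today?",
    context: "I'm working on a React project",
};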
package/src/index.ts
CHANGED

@@ -2,13 +2,20 @@ import { readFileSync, existsSync } from "node:fs";
 import { join, dirname } from "node:path";
 import { createServer } from "node:http";
 import { fileURLToPath } from "node:url";
+
+import { Router } from 'express';
 import express from "express";
 
 import { handler as askAiHandler } from "@xyd-js/ask-ai/node";
 import type { MCPServer } from "@xyd-js/mcp-server/mcp";
 
+import type { Settings } from "./settings";
+
 // Start the server
-export async function startServer(
+export async function startServer(
+    settings: Settings,
+    mcpServer?: MCPServer
+) {
     const widgetPath = findWidgetPath();
     console.log("✅ Widget found:", widgetPath);
 
@@ -23,12 +30,15 @@ export async function startServer(mcpServer?: MCPServer) {
 
     // Set CORS headers
     app.use((req, res, next) => {
-        res.header(
-        res.header("Access-Control-Allow-Methods", "GET, POST, OPTIONS");
-        res.header("Access-Control-Allow-Headers", "Content-Type");
+        res.header(settings.headers);
         next();
     });
 
+    // Handle preflight OPTIONS requests
+    app.options("*", (req, res) => {
+        res.status(200).end();
+    });
+
     // Serve the widget bundle at /widget.js
     app.get("/widget.js", (req, res) => {
         try {
@@ -46,7 +56,7 @@ export async function startServer(mcpServer?: MCPServer) {
     });
 
     // Handle ask AI requests at /ask
-    app.
+    app.post("/ask", async (req, res) => {
         try {
             // Convert Express request to Web API Request
             const body = req.method === "POST" ? JSON.stringify(req.body) : undefined;
@@ -56,7 +66,20 @@ export async function startServer(mcpServer?: MCPServer) {
                 body: body,
             });
 
-
+            let mcpUrl = settings.mcp?.url || "";
+            if (Array.isArray(mcpUrl)) {
+                console.warn("MCP as array is not supported, using the first one");
+                mcpUrl = mcpUrl[0];
+            }
+
+            const askAiHandlerFn = askAiHandler({
+                mcpUrl: mcpUrl,
+                aiProvider: settings.ai.provider || "",
+                aiModel: settings.ai.model || "",
+                aiToken: settings.ai.token || "",
+            });
+
+            const response = await askAiHandlerFn(request);
 
             // Copy response headers
             for (const [key, value] of response.headers.entries()) {
package/src/settings.ts
ADDED

@@ -0,0 +1,72 @@
+/**
+ * The settings for the Ask AI widget
+ */
+export interface Settings {
+    /**
+     * The AI settings to use
+     */
+    ai: AI;
+
+    /**
+     * The MCP settings to use
+     */
+    mcp?: MCP;
+
+    /**
+     * The sources to use
+     */
+    sources?: Sources;
+
+    /**
+     * The headers to use
+     * @example { "Access-Control-Allow-Origin": "*", "Access-Control-Allow-Headers": "Content-Type, Authorization, X-Requested-With", "Access-Control-Allow-Methods": "GET, POST, OPTIONS, PUT, DELETE", "Access-Control-Max-Age": "86400" }
+     */
+    headers?: Record<string, string>;
+}
+
+export interface AI {
+    /**
+     * The AI provider to use
+     * @example "openai"
+     * @example "anthropic"
+     */
+    provider: string;
+
+    /**
+     * The AI model to use
+     * @example "gpt-4o"
+     * @example "claude-3-5-sonnet-20240620"
+     */
+    model: string;
+
+    /**
+     * The AI token to use
+     * @example "sk-1234567890"
+     */
+    token: string;
+}
+
+export interface MCP {
+    /**
+     * The MCP URL to use
+     * @example "http://localhost:3000/mcp"
+     * @example ["http://localhost:3000/mcp", "http://localhost:3001/mcp"]
+     */
+    url: string | string[];
+}
+
+export interface Sources {
+    /**
+     * The OpenAPI sources to use
+     * @example "http://localhost:3000/openapi.yaml"
+     * @example ["http://localhost:3000/openapi.yaml", "./openapi.yaml"]
+     */
+    openapi: string | string[];
+
+    /**
+     * The LLMs sources to use
+     * @example "http://localhost:3000/llms.tx"
+     * @example ["http://localhost:3000/llms.txt", "./llms.txt"]
+     */
+    llms: string | string[];
+}
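For reference, here is a value that satisfies this interface, using the placeholder values from the @example tags above; loadSetting in src/utils.ts below reads the same shape from an askai.json in the working directory.

// Illustrative only: placeholder values showing the shape Settings expects.
import type { Settings } from "./settings";

const exampleSettings: Settings = {
    ai: {
        provider: "openai",
        model: "gpt-4o",
        token: "sk-1234567890", // placeholder, not a real token
    },
    mcp: {
        url: "http://localhost:3000/mcp",
    },
    sources: {
        openapi: "./openapi.yaml",
        llms: "./llms.txt",
    },
    headers: {
        "Access-Control-Allow-Origin": "*",
    },
};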
package/src/utils.ts
ADDED

@@ -0,0 +1,107 @@
+import path from "node:path";
+import fs from "node:fs/promises";
+
+import { Settings } from "./settings";
+
+import { replaceEnvVars } from "@xyd-js/cli-sdk";
+
+function getSettingsPath(): string {
+    return path.join(process.cwd(), "askai.json");
+}
+
+export async function loadSetting(): Promise<Settings> {
+    const settingsPath = getSettingsPath();
+    try {
+        await fs.access(settingsPath);
+    } catch (error) {
+        const settings = newSettings();
+        ensureSettings(settings);
+
+        return settings;
+    }
+
+    const settingsJSON = await fs.readFile(settingsPath, "utf-8");
+    let settings = JSON.parse(settingsJSON);
+    settings = replaceEnvVars(settings, true);
+
+    ensureSettings(settings);
+
+    return settings;
+}
+
+function newSettings(): Settings {
+    return {
+        ai: {
+            provider: "",
+            model: "",
+            token: "",
+        },
+        mcp: {
+            url: "",
+        },
+        headers: {},
+    };
+}
+
+function ensureSettings(settings: Settings) {
+    if (!settings.ai) {
+        settings.ai = {
+            provider: "",
+            model: "",
+            token: "",
+        };
+    }
+
+    if (!settings.mcp) {
+        settings.mcp = {
+            url: "",
+        };
+    }
+
+    if (!settings.sources) {
+        settings.sources = {
+            openapi: "",
+            llms: "",
+        };
+    }
+
+    if (process.env.OPENAPI_SOURCE) {
+        settings.sources.openapi = process.env.OPENAPI_SOURCE;
+        console.log("(env settings): using OPENAPI_SOURCE");
+    }
+
+    if (process.env.LLMS_SOURCE) {
+        settings.sources.llms = process.env.LLMS_SOURCE;
+        console.log("(env settings): using LLMS_SOURCE");
+    }
+
+    if (process.env.MCP_URL) {
+        settings.mcp.url = process.env.MCP_URL;
+        console.log("(env settings): using MCP_URL");
+    }
+
+    if (process.env.AI_PROVIDER) {
+        settings.ai.provider = process.env.AI_PROVIDER;
+        console.log("(env settings): using AI_PROVIDER");
+    }
+
+    if (process.env.AI_MODEL) {
+        settings.ai.model = process.env.AI_MODEL;
+        console.log("(env settings): using AI_MODEL");
+    }
+
+    if (process.env.AI_TOKEN) {
+        settings.ai.token = process.env.AI_TOKEN;
+        console.log("(env settings): using AI_TOKEN");
+    }
+
+    if (!settings.headers || !Object.keys(settings.headers).length) {
+        settings.headers = {
+            "Access-Control-Allow-Origin": "*",
+            "Access-Control-Allow-Headers":
+                "Content-Type, Authorization, X-Requested-With",
+            "Access-Control-Allow-Methods": "GET, POST, OPTIONS, PUT, DELETE",
+            "Access-Control-Max-Age": "86400",
+        };
+    }
+}
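Taken together, newSettings and ensureSettings mean the widget can also be configured entirely through environment variables when no askai.json is present. A hedged sketch of that path (values are placeholders; the expected results in the comments follow from the code above):

// Illustrative only: configuring the widget purely via environment variables.
// With no askai.json on disk, loadSetting() falls back to newSettings() and
// ensureSettings() then applies these overrides plus the default CORS headers.
import { loadSetting } from "./src/utils";

process.env.AI_PROVIDER = "openai";
process.env.AI_MODEL = "gpt-4o";
process.env.AI_TOKEN = "sk-1234567890"; // placeholder token
process.env.MCP_URL = "http://localhost:3000/mcp";
process.env.OPENAPI_SOURCE = "./openapi.yaml";
process.env.LLMS_SOURCE = "./llms.txt";

const settings = await loadSetting();
console.log(settings.ai.provider); // "openai"
console.log(settings.mcp?.url);    // "http://localhost:3000/mcp"
console.log(settings.headers);     // permissive CORS defaults from ensureSettings()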