@codepress/codepress-engine 0.4.0-dev.preview-bundle.20251102225420
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +11 -0
- package/README.md +201 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.js +320 -0
- package/dist/cli.js.map +1 -0
- package/dist/index.d.ts +10 -0
- package/dist/index.js +176 -0
- package/dist/index.js.map +1 -0
- package/dist/previewBundle.d.ts +20 -0
- package/dist/previewBundle.js +246 -0
- package/dist/previewBundle.js.map +1 -0
- package/dist/server.d.ts +29 -0
- package/dist/server.js +1140 -0
- package/dist/server.js.map +1 -0
- package/dist/swc/index.d.ts +16 -0
- package/dist/swc/index.js +126 -0
- package/dist/swc/index.js.map +1 -0
- package/dist/types.d.ts +4 -0
- package/dist/types.js +3 -0
- package/dist/types.js.map +1 -0
- package/package.json +114 -0
- package/swc/codepress_engine.v0_82_87.wasm +0 -0
- package/swc/codepress_engine.v26.wasm +0 -0
- package/swc/codepress_engine.v42.wasm +0 -0
- package/swc/index.js +14 -0
package/dist/server.js
ADDED
|
@@ -0,0 +1,1140 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// Codepress Dev Server
|
|
3
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
4
|
+
if (k2 === undefined) k2 = k;
|
|
5
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
6
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
7
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
8
|
+
}
|
|
9
|
+
Object.defineProperty(o, k2, desc);
|
|
10
|
+
}) : (function(o, m, k, k2) {
|
|
11
|
+
if (k2 === undefined) k2 = k;
|
|
12
|
+
o[k2] = m[k];
|
|
13
|
+
}));
|
|
14
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
15
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
16
|
+
}) : function(o, v) {
|
|
17
|
+
o["default"] = v;
|
|
18
|
+
});
|
|
19
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
20
|
+
var ownKeys = function(o) {
|
|
21
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
22
|
+
var ar = [];
|
|
23
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
24
|
+
return ar;
|
|
25
|
+
};
|
|
26
|
+
return ownKeys(o);
|
|
27
|
+
};
|
|
28
|
+
return function (mod) {
|
|
29
|
+
if (mod && mod.__esModule) return mod;
|
|
30
|
+
var result = {};
|
|
31
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
32
|
+
__setModuleDefault(result, mod);
|
|
33
|
+
return result;
|
|
34
|
+
};
|
|
35
|
+
})();
|
|
36
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
37
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
38
|
+
};
|
|
39
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
40
|
+
exports.createApp = createApp;
|
|
41
|
+
exports.getProjectStructure = getProjectStructure;
|
|
42
|
+
exports.startServer = startServer;
|
|
43
|
+
const cors_1 = __importDefault(require("@fastify/cors"));
|
|
44
|
+
const fastify_1 = __importDefault(require("fastify"));
|
|
45
|
+
const fs = __importStar(require("node:fs"));
|
|
46
|
+
const os = __importStar(require("node:os"));
|
|
47
|
+
const path = __importStar(require("node:path"));
|
|
48
|
+
const prettier = __importStar(require("prettier"));
|
|
49
|
+
const index_1 = require("./index");
|
|
50
|
+
/**
 * Resolves a possibly-relative or malformed path to an absolute path.
 * Relative paths are resolved against the current working directory, and
 * macOS-style absolute paths that lost their leading slash (e.g. "Users/...")
 * are repaired before resolution.
 * @param {string} inputPath
 * @returns {string}
 */
function toAbsolutePath(inputPath) {
    if (!inputPath)
        return process.cwd();
    const cleaned = String(inputPath).trim();
    // Detect POSIX-style absolute paths that are missing their leading "/"
    // (common mangling of macOS paths such as "Users/..." or "Volumes/...").
    const missingLeadingSlash = process.platform !== "win32" &&
        (cleaned.startsWith("Users" + path.sep) ||
            cleaned.startsWith("Volumes" + path.sep));
    const repaired = missingLeadingSlash ? path.sep + cleaned : cleaned;
    if (path.isAbsolute(repaired)) {
        return repaired;
    }
    return path.join(process.cwd(), repaired);
}
|
|
72
|
+
/**
 * Determines the TCP port the dev server should listen on.
 * Reads CODEPRESS_DEV_PORT from the environment, falling back to 4321.
 * @returns {number} The configured port
 */
function getServerPort() {
    const configured = process.env.CODEPRESS_DEV_PORT;
    return parseInt(configured || "4321", 10);
}
|
|
80
|
+
/**
 * Builds the path of the lock file used to enforce a single server instance.
 * The port is embedded in the file name so servers on different ports do not
 * contend for the same lock.
 * @param {number} [port] Optional port to scope the lock file to
 * @returns {string} Path of the lock file inside the OS temp directory
 */
function getLockPath(port) {
    const portSuffix = port ? `-${port}` : "";
    return path.join(os.tmpdir(), `codepress-dev-server${portSuffix}.lock`);
}
|
|
84
|
+
/**
 * Reads and parses the JSON lock file.
 * @param {string} lockPath Path to the lock file
 * @returns {Object|null} Parsed lock data, or null when missing or unreadable
 */
function readLockFile(lockPath) {
    try {
        if (!fs.existsSync(lockPath))
            return null;
        return JSON.parse(fs.readFileSync(lockPath, "utf8"));
    }
    catch {
        // Corrupt or unreadable lock files are treated the same as absent ones.
        return null;
    }
}
|
|
95
|
+
/**
 * Checks whether a process with the given PID currently exists.
 * Signal 0 performs the existence check without delivering a signal.
 * NOTE(review): a kill() failure for any reason (including EPERM) is
 * reported as "not alive" here, matching the lock-cleanup use case.
 * @param {number} pid
 * @returns {boolean}
 */
function isPidAlive(pid) {
    try {
        process.kill(pid, 0);
        return true;
    }
    catch {
        return false;
    }
}
|
|
105
|
+
/**
 * Writes the lock file recording the owning PID and the acquisition time.
 * @param {string} lockPath Path of the lock file
 * @param {number} pid PID of the process taking ownership
 */
function writeLockFile(lockPath, pid) {
    const payload = { pid, timestamp: Date.now() };
    fs.writeFileSync(lockPath, JSON.stringify(payload));
}
|
|
108
|
+
/**
 * Best-effort removal of the lock file; errors are deliberately swallowed so
 * shutdown paths never fail because of a missing or busy lock.
 * @param {string} lockPath Path of the lock file
 */
function removeLockFile(lockPath) {
    try {
        if (!fs.existsSync(lockPath))
            return;
        fs.unlinkSync(lockPath);
    }
    catch {
        // ignore — removal is best-effort
    }
}
|
|
118
|
+
/**
 * Acquires the single-instance lock for the configured port.
 * A lock owned by a live process blocks acquisition; stale locks (dead PID
 * or unreadable contents) are removed and replaced with our own.
 * @returns {boolean} True if lock was acquired, false otherwise
 */
function acquireLock() {
    const lockPath = getLockPath(getServerPort());
    const current = readLockFile(lockPath);
    // Another live process already holds the lock.
    if (current?.pid && isPidAlive(current.pid)) {
        return false;
    }
    // Whatever is there is stale — clear it before claiming ownership.
    removeLockFile(lockPath);
    try {
        writeLockFile(lockPath, process.pid);
        return true;
    }
    catch (err) {
        console.error("Error acquiring lock:", err);
        return false;
    }
}
|
|
139
|
+
/**
 * Releases the single-instance lock, but only when this process owns it, so
 * a late-exiting process cannot delete a newer instance's lock file.
 */
function releaseLock() {
    const lockPath = getLockPath(getServerPort());
    const owner = readLockFile(lockPath);
    if (owner?.pid === process.pid) {
        removeLockFile(lockPath);
    }
}
|
|
149
|
+
// Module-level singleton holding the running server instance
// (null whenever no server has been started).
let serverInstance = null;
|
|
151
|
+
/**
 * Calls the backend API and consumes a Server-Sent-Events (SSE) response,
 * forwarding each parsed event to the caller as it arrives.
 *
 * Responses without a "text/event-stream" content type (or calls without an
 * onStreamEvent callback) fall back to plain JSON parsing.
 *
 * @param {string} method HTTP method
 * @param {string} endpoint API endpoint (a leading "/" is stripped)
 * @param {Object} data Request payload, JSON-encoded when present
 * @param {string} incomingAuthHeader Authorization header forwarded verbatim
 * @param {Function} onStreamEvent Callback invoked with each parsed SSE event
 * @returns {Promise<Object>} The "final_result" payload captured from the
 *   stream (null if none arrived), or the parsed JSON body for non-streaming
 *   responses
 * @throws {Error} When the HTTP response is not ok or the stream has no body
 */
async function callBackendApiStreaming(method, endpoint, data, incomingAuthHeader, onStreamEvent) {
    var _a;
    // Backend API settings
    const apiHost = process.env.CODEPRESS_BACKEND_HOST || "localhost";
    const apiPort = parseInt(process.env.CODEPRESS_BACKEND_PORT || "8007", 10);
    const apiPath = endpoint.startsWith("/")
        ? endpoint.replace("/", "")
        : endpoint;
    const protocol = apiHost === "localhost" || apiHost === "127.0.0.1" ? "http" : "https";
    const url = `${protocol}://${apiHost}:${apiPort}/${apiPath}`;
    const requestOptions = {
        method,
        headers: {
            "Content-Type": "application/json",
            Accept: "text/event-stream",
        },
    };
    if (incomingAuthHeader) {
        requestOptions.headers.Authorization = incomingAuthHeader;
    }
    if (data) {
        requestOptions.body = JSON.stringify(data);
    }
    try {
        console.log(`\x1b[36mℹ Calling backend streaming API: ${method} ${url}\x1b[0m`);
        const response = await fetch(url, requestOptions);
        if (!response.ok) {
            const errorText = await response.text();
            throw new Error(`Backend API error (${response.status}): ${errorText}`);
        }
        // Handle streaming response
        if (((_a = response.headers.get("content-type")) === null || _a === void 0 ? void 0 : _a.includes("text/event-stream")) &&
            onStreamEvent) {
            const body = response.body;
            if (!body) {
                throw new Error("Backend streaming response has no body");
            }
            const reader = body.getReader();
            const decoder = new TextDecoder();
            let finalResult = null;
            // BUG FIX: SSE frames are not guaranteed to align with network
            // chunks; buffer the trailing partial line so a "data: ..." frame
            // split across chunks is no longer dropped with a parse error.
            let pending = "";
            let finished = false;
            try {
                while (!finished) {
                    const { done, value } = await reader.read();
                    if (done)
                        break;
                    pending += decoder.decode(value, { stream: true });
                    const lines = pending.split("\n");
                    // Keep the last (possibly incomplete) line for the next chunk.
                    pending = lines.pop() ?? "";
                    for (const line of lines) {
                        if (!line.startsWith("data: "))
                            continue;
                        try {
                            const eventData = JSON.parse(line.slice(6));
                            // Forward the event to the client
                            onStreamEvent(eventData);
                            // Capture final result if this is a completion event
                            if (eventData.type === "final_result") {
                                finalResult = eventData.result;
                            }
                            else if (eventData.type === "complete") {
                                // BUG FIX: the original `break` only exited the
                                // inner for-loop; terminate the read loop too.
                                finished = true;
                                break;
                            }
                        }
                        catch (parseError) {
                            console.error("Error parsing SSE data:", parseError, "Line:", line);
                        }
                    }
                }
            }
            finally {
                reader.releaseLock();
            }
            // May still be null if no final_result event arrived.
            return finalResult;
        }
        else {
            // Fallback to regular JSON response
            return (await response.json());
        }
    }
    catch (error) {
        console.error(`\x1b[31m✗ Backend streaming API call failed: ${error.message}\x1b[0m`);
        throw error;
    }
}
|
|
255
|
+
/**
 * Sends a JSON request to the Codepress backend and returns the parsed body.
 * Authentication prefers the CODEPRESS_API_TOKEN environment variable and
 * falls back to the bearer token found in the incoming Authorization header.
 * @param {string} method HTTP method
 * @param {string} endpoint API endpoint (a leading "/" is stripped)
 * @param {Object} data Optional request payload
 * @param {string} incomingAuthHeader Optional incoming Authorization header
 * @returns {Promise<Object>} Parsed JSON response
 * @throws {Error} On non-2xx responses, invalid JSON, or network failure
 */
async function callBackendApi(method, endpoint, data, incomingAuthHeader) {
    // Backend API settings
    const apiHost = process.env.CODEPRESS_BACKEND_HOST || "localhost";
    const apiPort = parseInt(process.env.CODEPRESS_BACKEND_PORT || "8007", 10);
    const apiPath = endpoint.startsWith("/") ? endpoint.replace("/", "") : endpoint;
    // localhost talks plain HTTP; any other host is assumed to be HTTPS.
    const protocol = apiHost === "localhost" || apiHost === "127.0.0.1" ? "http" : "https";
    console.log(`\x1b[36mℹ API Path: ${apiPath} \x1b[0m`);
    const url = `${protocol}://${apiHost}${apiPort ? `:${apiPort}` : ""}/v1/${apiPath}`;
    console.log(`\x1b[36mℹ Sending request to ${url} \x1b[0m`);
    try {
        // Environment token wins; otherwise reuse the caller's bearer token.
        let authToken = process.env.CODEPRESS_API_TOKEN;
        console.log(`\x1b[36mℹ Environment API token: ${authToken ? "[PRESENT]" : "[NOT SET]"}\x1b[0m`);
        console.log(`\x1b[36mℹ Incoming auth header: ${incomingAuthHeader ? "[PRESENT]" : "[NOT PROVIDED]"}\x1b[0m`);
        if (!authToken && incomingAuthHeader) {
            authToken = incomingAuthHeader.split(" ")[1]; // token part of "Bearer <token>"
            console.log(`\x1b[36mℹ Using incoming Authorization header for authentication\x1b[0m`);
        }
        const headers = { "Content-Type": "application/json" };
        if (authToken) {
            headers["Authorization"] = `Bearer ${authToken}`;
            // Log which auth source was used without exposing the token itself.
            console.log(`\x1b[36mℹ Using ${process.env.CODEPRESS_API_TOKEN ? "API Token" : "GitHub OAuth Token"} for authentication\x1b[0m`);
            console.log(`\x1b[36mℹ Final auth header: Bearer ${authToken.substring(0, 10)}...\x1b[0m`);
        }
        else {
            console.log("\x1b[33m⚠ No authentication token available\x1b[0m");
        }
        const response = await fetch(url, {
            method,
            headers,
            body: data ? JSON.stringify(data) : undefined,
        });
        const responseText = await response.text();
        console.log(`\x1b[36mℹ Response status: ${response.status}\x1b[0m`);
        console.log(`\x1b[36mℹ Response preview: ${responseText.substring(0, 100)}...\x1b[0m`);
        if (!response.ok) {
            throw new Error(`API request failed with status ${response.status}: ${responseText}`);
        }
        try {
            return JSON.parse(responseText);
        }
        catch (err) {
            throw new Error(`Invalid JSON response: ${err.message}`);
        }
    }
    catch (err) {
        // node-fetch style network failures get a clearer message;
        // everything else is re-thrown unchanged.
        if (err.name === "FetchError") {
            throw new Error(`Network error: ${err.message}`);
        }
        throw err;
    }
}
|
|
322
|
+
/**
 * Service: Save base64 image data into the project's public/ directory.
 * Accepts either a full data URI ("data:image/png;base64,...") or a raw
 * base64 string. When no filename is given, one is generated from the
 * current timestamp and the extension found in the data URI (default .png).
 * @param {Object} params - Function parameters
 * @param {string} params.imageData - Base64 image data (raw or data URI)
 * @param {string} [params.filename] - Optional filename to save under
 * @returns {Promise<string|null>} The saved image path or null if failed
 */
async function saveImageData({ imageData, filename, }) {
    if (!imageData)
        return null;
    try {
        const imageDir = path.join(process.cwd(), "public");
        if (!fs.existsSync(imageDir)) {
            fs.mkdirSync(imageDir, { recursive: true });
            console.log(`\x1b[36mℹ Created directory: ${imageDir}\x1b[0m`);
        }
        let imagePath;
        let base64Data;
        if (filename) {
            imagePath = path.join(imageDir, filename);
            // Strip the data-URI prefix if a full URI was sent; otherwise
            // assume the payload is already raw base64.
            const match = imageData.match(/^data:image\/[\w+]+;base64,(.+)$/);
            base64Data = match?.[1] ? match[1] : imageData;
            // BUG FIX: the original logged the literal text "$(unknown)" — the
            // template placeholder was mangled; log the actual filename.
            console.log(`\x1b[36mℹ Using provided filename: ${filename}\x1b[0m`);
        }
        else {
            // No filename: derive the extension from the data URI if possible.
            const match = imageData.match(/^data:image\/([\w+]+);base64,(.+)$/);
            let imageExtension;
            if (match?.[1] && match?.[2]) {
                imageExtension = match[1];
                base64Data = match[2];
            }
            else {
                base64Data = imageData;
                imageExtension = "png";
                console.log("\x1b[33m⚠ Image data URI prefix not found and no filename provided, defaulting to .png extension.\x1b[0m");
            }
            // Normalize MIME subtypes to conventional file extensions.
            if (imageExtension === "jpeg")
                imageExtension = "jpg";
            if (imageExtension === "svg+xml")
                imageExtension = "svg";
            const imageName = `image_${Date.now()}.${imageExtension}`;
            imagePath = path.join(imageDir, imageName);
        }
        const imageBuffer = Buffer.from(base64Data, "base64");
        fs.writeFileSync(imagePath, imageBuffer);
        console.log(`\x1b[32m✓ Image saved to ${imagePath}\x1b[0m`);
        return imagePath;
    }
    catch (imgError) {
        console.error(`\x1b[31m✗ Error saving image: ${imgError.message}\x1b[0m`);
        return null;
    }
}
|
|
382
|
+
/**
 * Service: Resolve an encoded location to a file on disk and read it.
 * The location format is "<encodedPath>:<extra>"; only the segment before
 * the first ":" is decoded.
 * @param {string} encodedLocation The encoded file location
 * @returns {Object} File data: { filePath, targetFile, fileContent }
 */
function readFileFromEncodedLocation(encodedLocation) {
    const [encodedFilePath] = encodedLocation.split(":");
    const filePath = (0, index_1.decode)(encodedFilePath);
    console.log(`\x1b[36mℹ Decoded file path: ${filePath}\x1b[0m`);
    // Absolute decoded paths are used verbatim; relative ones resolve to cwd.
    const targetFile = path.isAbsolute(filePath)
        ? filePath
        : path.join(process.cwd(), filePath);
    console.log(`\x1b[36mℹ Reading file: ${targetFile}\x1b[0m`);
    const fileContent = fs.readFileSync(targetFile, "utf8");
    return { filePath, targetFile, fileContent };
}
|
|
399
|
+
/**
 * Service: Request processed code changes from the backend.
 * Forwards the repository name and per-file change descriptors to the
 * backend "get-changes" endpoint.
 * @param {Object} params Request parameters
 * @param {string} params.githubRepoName The GitHub repository name
 * @param {Array<Object>} params.fileChanges Per-file change objects; each has
 *   an encoded_location, the current file_content, style_changes, and
 *   text_changes (old_text/new_text pairs, optionally with their own
 *   encoded_location and style_changes inherited from the parent change)
 * @param {string} [params.authHeader] Bearer token for backend authentication
 * @returns {Promise<Object>} Backend response, typically an 'updated_files'
 *   map of file paths to new content
 */
async function getChanges({ githubRepoName, fileChanges, authHeader, }) {
    console.log(`\x1b[36mℹ Getting changes from backend for ${fileChanges.length} files\x1b[0m`);
    const payload = {
        github_repo_name: githubRepoName,
        file_changes: fileChanges,
    };
    return await callBackendApi("POST", "code-sync/get-changes", payload, authHeader);
}
|
|
422
|
+
/**
 * Chooses the Prettier parser for a file based on its extension:
 * "typescript" for .ts/.tsx, otherwise "babel" (covers .js/.jsx).
 * (The JSDoc previously attached here described a different function.)
 * @param {string} filePath Path or name of the file being formatted
 * @returns {string} Prettier parser name
 */
function pickPrettierParser(filePath) {
    const lower = (filePath || "").toLowerCase();
    if (lower.endsWith(".ts") || lower.endsWith(".tsx"))
        return "typescript";
    return "babel"; // default for .js/.jsx and others
}
|
|
434
|
+
/**
 * Attempts to format source code with Prettier, choosing the parser from the
 * file extension. Returns null instead of throwing when formatting fails
 * (e.g. on syntactically invalid code), so callers can fall back.
 * @param {string} code Source code to format
 * @param {string} filePath Path used only for parser selection
 * @returns {Promise<string|null>} Formatted code, or null on failure
 */
async function tryFormatWithPrettierOrNull(code, filePath) {
    try {
        // Wrapped in Promise.resolve so both sync- and promise-returning
        // prettier.format implementations are handled uniformly.
        const formatted = await Promise.resolve(prettier.format(code, {
            parser: pickPrettierParser(filePath),
            semi: true,
            singleQuote: false,
        }));
        return formatted;
    }
    catch (err) {
        console.error("Error formatting code with Prettier:", err);
        return null;
    }
}
|
|
448
|
+
/**
 * Best-effort repair for JSX/HTML snippets with unclosed tags: scans the
 * markup, tracks open tags on a stack, and appends any missing closing tags
 * at the end. On any internal error the input is returned unchanged.
 * @param {string} code Markup to repair
 * @returns {string} The input, possibly with closing tags appended
 */
function closeUnclosedJsxTags(code) {
    try {
        const tagRegex = /<\/?([A-Za-z][A-Za-z0-9]*)\b[^>]*?\/?>(?!\s*<\!)/g;
        const selfClosingRegex = /<([A-Za-z][A-Za-z0-9]*)\b[^>]*?\/>/;
        const openTags = [];
        let found;
        while ((found = tagRegex.exec(code)) !== null) {
            const [tagText, tagName] = found;
            if (selfClosingRegex.test(tagText))
                continue; // e.g. <br/> — nothing to balance
            if (!tagText.startsWith("</")) {
                openTags.push(tagName);
                continue;
            }
            // Closing tag: pop a matching open tag, preferring the innermost.
            if (openTags.length && openTags[openTags.length - 1] === tagName) {
                openTags.pop();
            }
            else {
                const openIdx = openTags.lastIndexOf(tagName);
                if (openIdx !== -1)
                    openTags.splice(openIdx, 1);
            }
        }
        if (openTags.length === 0)
            return code;
        // Close remaining tags innermost-first.
        const closers = openTags
            .slice()
            .reverse()
            .map((tag) => `</${tag}>`)
            .join("");
        return code + "\n" + closers + "\n";
    }
    catch {
        return code;
    }
}
|
|
490
|
+
/**
 * Service: Overwrite a file on disk with new content, creating parent
 * directories as needed. String content is run through Prettier first
 * (falling back to the unformatted text when formatting fails); objects of
 * the form { type: "binary", base64 } are decoded and written as raw bytes.
 * @param {string|Object} modifiedContent The complete new file content
 * @param {string} targetFile Target file path
 * @returns {Promise<string>} The text that was written, "binary_encoded_file"
 *   for binary payloads, or "" for unrecognized content
 */
async function applyFullFileReplacement(modifiedContent, targetFile) {
    console.log(`\x1b[36mℹ Applying full file replacement\x1b[0m`);
    // Ensure the destination directory exists (best-effort).
    try {
        const parentDir = path.dirname(targetFile);
        if (!fs.existsSync(parentDir)) {
            fs.mkdirSync(parentDir, { recursive: true });
            console.log(`\x1b[36mℹ Created directory: ${parentDir}\x1b[0m`);
        }
    }
    catch (mkdirErr) {
        console.error(`\x1b[31m✗ Failed to ensure directory for ${targetFile}: ${mkdirErr.message}\x1b[0m`);
    }
    let formattedCode = "";
    if (typeof modifiedContent === "string") {
        try {
            formattedCode = await prettier.format(modifiedContent, {
                parser: pickPrettierParser(targetFile),
                semi: true,
                singleQuote: false,
            });
        }
        catch (prettierError) {
            console.error("Prettier formatting failed:", prettierError);
            // Fall back to writing the content exactly as received.
            formattedCode = modifiedContent;
        }
        fs.writeFileSync(targetFile, formattedCode, "utf8");
    }
    else if (modifiedContent.type === "binary" && modifiedContent.base64) {
        const bytes = Buffer.from(modifiedContent.base64, "base64");
        formattedCode = "binary_encoded_file";
        fs.writeFileSync(targetFile, bytes);
    }
    else {
        console.warn(`Unknown file type for ${targetFile}, skipping`);
        formattedCode = "";
    }
    console.log(`\x1b[32m✓ Updated file ${targetFile} with complete file replacement\x1b[0m`);
    return formattedCode;
}
|
|
532
|
+
/**
 * Handle a streaming agent request over Server-Sent Events: proxies the
 * request to the backend's streaming agent endpoint, mirrors every backend
 * event to the connected client, and applies file updates to disk as they
 * arrive so hot reload can pick them up.
 * @param {Object} params - Function parameters
 * @param {Object} params.reply - Fastify reply object (its raw stream is used)
 * @param {Object} params.data - Request body data
 * @param {string} params.authHeader - Authorization header
 * @param {string} params.fileContent - The file content to process
 */
async function handleStreamingAgentRequest({ reply, data, authHeader, fileContent, }) {
    const { encoded_location, github_repo_name, user_instruction, branch_name } = data;
    // Debug request
    console.log("[engine] handleStreamingAgentRequest start", {
        encoded_location,
        github_repo_name,
        branch_name,
        user_instruction_len: typeof user_instruction === "string" ? user_instruction.length : undefined,
        hasAuth: !!authHeader,
    });
    // Switch the raw response into SSE mode before any events are written.
    reply.raw.writeHead(200, {
        "Content-Type": "text/event-stream",
        "Cache-Control": "no-cache",
        Connection: "keep-alive",
        "Access-Control-Allow-Origin": "*",
        "Access-Control-Allow-Headers": "Cache-Control",
    });
    // Strips editor-emitted ".tmp.xyz" suffixes so writes target the real file.
    function normalizeTmpPath(rawPath) {
        const tmpIdx = rawPath.indexOf(".tmp.");
        return tmpIdx === -1 ? rawPath : rawPath.slice(0, tmpIdx);
    }
    // Formats content, retrying once after auto-closing unbalanced JSX tags.
    async function formatOrNull(content, targetFilePath) {
        let formatted = await tryFormatWithPrettierOrNull(content, targetFilePath);
        if (!formatted) {
            const repaired = closeUnclosedJsxTags(content);
            formatted = await tryFormatWithPrettierOrNull(repaired, targetFilePath);
        }
        return formatted;
    }
    // Apply incremental updates to disk for hot reload.
    async function writeIncrementalUpdate(eventData) {
        try {
            console.log("[engine] writeIncrementalUpdate begin", {
                type: eventData?.type,
                file_path: eventData?.file_path,
            });
            if (eventData &&
                eventData.type === "file_update" &&
                eventData.file_path &&
                typeof eventData.content === "string") {
                const targetFilePath = toAbsolutePath(normalizeTmpPath(eventData.file_path));
                const formatted = await formatOrNull(eventData.content, targetFilePath);
                if (formatted) {
                    await applyFullFileReplacement(formatted, targetFilePath);
                }
                else {
                    console.warn("Skipping incremental write due to unparseable content for", targetFilePath);
                }
            }
            if (eventData &&
                eventData.type === "final_result" &&
                eventData.result &&
                eventData.result.updated_files) {
                for (const [filePath, newContent] of Object.entries(eventData.result.updated_files)) {
                    const targetFilePath = toAbsolutePath(normalizeTmpPath(filePath));
                    const formatted = await formatOrNull(newContent, targetFilePath);
                    // Unlike the file_update branch, unformattable content is
                    // still written verbatim here.
                    await applyFullFileReplacement(formatted ?? newContent, targetFilePath);
                }
            }
        }
        catch (e) {
            console.error("Failed to apply incremental update:", e);
        }
    }
    // Serialize one event onto the SSE stream.
    function sendEvent(eventData) {
        reply.raw.write(`data: ${JSON.stringify(eventData)}\n\n`);
    }
    try {
        // The backend performs the agent run and streams its progress events;
        // each event is applied locally and then mirrored to the client.
        const backendResponse = await callBackendApiStreaming("POST", "v1/code-sync/get-agent-changes", {
            github_repo_name: github_repo_name,
            encoded_location: encoded_location,
            file_content: fileContent,
            branch_name: branch_name,
            user_instruction: user_instruction,
        }, authHeader, async (evt) => {
            console.log("[engine] stream event", {
                type: evt?.type,
                file_path: evt?.file_path,
                success: evt?.success,
            });
            await writeIncrementalUpdate(evt);
            sendEvent(evt);
        });
        console.log(`\x1b[36mℹ backendResponse to agent: ${JSON.stringify(backendResponse)}\x1b[0m`);
        if (backendResponse?.updated_files) {
            const updatedFilePaths = [];
            for (const [filePath, newContent] of Object.entries(backendResponse.updated_files)) {
                await applyFullFileReplacement(newContent, toAbsolutePath(filePath));
                updatedFilePaths.push(filePath);
            }
            // Send final success event
            sendEvent({
                type: "final_result",
                result: {
                    success: true,
                    updated_file_paths: updatedFilePaths,
                },
                success: true,
                message: `✅ Changes applied successfully to ${updatedFilePaths.length} file(s)!`,
                ephemeral: false,
            });
        }
        else {
            console.log(backendResponse);
            throw new Error("No valid response from backend");
        }
        // Send completion event
        sendEvent({ type: "complete" });
    }
    catch (error) {
        console.error(`\x1b[31m✗ Error in streaming agent: ${error.message}\x1b[0m`);
        sendEvent({
            type: "error",
            error: error.message,
            ephemeral: false,
        });
    }
    reply.raw.end();
}
|
|
678
|
+
/**
 * Create and configure the Fastify app.
 *
 * Registers a permissive CORS policy and six routes:
 *  - GET  /ping                    health check ("pong")
 *  - GET  /meta                    name/version/environment/uptime
 *  - GET  /project-structure       gitignore-aware file listing
 *  - POST /visual-editor-api       batch style/text/move changes -> backend -> write files
 *  - POST /visual-editor-api-agent streaming agent-driven edits
 *  - POST /write-files             write backend-produced file contents to disk
 * @returns {Object} The configured Fastify instance
 */
function createApp() {
    const app = (0, fastify_1.default)({
        logger: false, // Disable built-in logging since we have custom logging
    });
    // Register CORS plugin.
    // NOTE(review): origin "*" combined with credentials: true — browsers reject
    // credentialed requests against a wildcard origin; presumably acceptable for
    // a local dev server, but confirm.
    app.register(cors_1.default, {
        origin: "*",
        methods: ["GET", "POST", "OPTIONS", "PUT", "PATCH", "DELETE"],
        allowedHeaders: [
            "X-Requested-With",
            "content-type",
            "Authorization",
            "Cache-Control",
            "Accept",
        ],
        credentials: true,
    });
    // Ping route: plain-text liveness probe.
    app.get("/ping", async (_request, reply) => {
        return reply.code(200).type("text/plain").send("pong");
    });
    // Meta route: basic server identity and process stats.
    app.get("/meta", async (_request, reply) => {
        // Try to get package version but don't fail if not available
        let version = "0.0.0";
        try {
            // In production builds, use a relative path that works with the installed package structure
            version = require("../package.json").version;
        }
        catch (err) {
            console.error("Error getting package version:", err);
            // Ignore error, use default version
        }
        return reply.code(200).send({
            name: "Codepress Dev Server",
            version: version,
            environment: process.env.NODE_ENV || "development",
            uptime: process.uptime(),
        });
    });
    // Project structure route: newline-joined file list (see getProjectStructure).
    app.get("/project-structure", async (_request, reply) => {
        try {
            const structure = getProjectStructure();
            return reply.code(200).send({
                success: true,
                structure: structure,
            });
        }
        catch (error) {
            console.error("Error getting project structure:", error);
            return reply.code(500).send({
                success: false,
                error: "Failed to get project structure",
                message: error.message,
            });
        }
    });
    // Visual editor API route for regular agent changes.
    // Pipeline: validate changes -> pre-read each referenced file once ->
    // build per-change payloads -> persist any screenshots -> ask the backend
    // for rewritten files -> write them to disk.
    app.post("/visual-editor-api", async (request, reply) => {
        try {
            const { changes, github_repo_name } = request.body;
            // NOTE(review): both operands read the same header; the `||` fallback
            // is redundant as written — confirm whether a different casing/key
            // was intended.
            const authHeader = request.headers.authorization || request.headers["authorization"];
            console.log(`\x1b[36mℹ Auth header received: ${authHeader ? "[PRESENT]" : "[MISSING]"}\x1b[0m`);
            if (!Array.isArray(changes)) {
                return reply.code(400).send({
                    error: "Invalid request format: 'changes' must be an array.",
                });
            }
            console.log(`\x1b[36mℹ Visual Editor API Request: Received ${changes.length} changes for repo ${github_repo_name}\x1b[0m`);
            // Diagnostics only: report whether the client captured viewport size.
            const changesWithDimensions = changes.filter((change) => change.browser_width && change.browser_height);
            if (changesWithDimensions.length > 0) {
                const sampleChange = changesWithDimensions[0];
                console.log(`\x1b[36mℹ Browser dimensions detected: ${sampleChange.browser_width}x${sampleChange.browser_height}\x1b[0m`);
            }
            else {
                console.log(`\x1b[33m⚠ No browser dimensions found in changes\x1b[0m`);
            }
            // First pass: keep only changes that (a) carry an encoded_location,
            // (b) decode to a real file, and (c) contain at least one edit.
            const uniqueEncodedLocations = new Set();
            const validChanges = [];
            for (const change of changes) {
                console.log(`\x1b[36mℹ change: ${JSON.stringify(change)}\x1b[0m`);
                try {
                    if (!change.encoded_location) {
                        console.warn(`\x1b[33m⚠ Skipping change with missing encoded_location.\x1b[0m`);
                        continue;
                    }
                    // encoded_location is "<encodedFilePath>:<position...>"; only the
                    // file part is needed for decodability validation here.
                    const encodedFilePath = change.encoded_location.split(":")[0];
                    const targetFile = (0, index_1.decode)(encodedFilePath);
                    if (!targetFile) {
                        console.warn(`\x1b[33m⚠ Skipping change with undecodable file from encoded_location: ${change.encoded_location}.\x1b[0m`);
                        continue;
                    }
                    const hasStyleChanges = change.style_changes && change.style_changes.length > 0;
                    const hasTextChanges = change.text_changes && change.text_changes.length > 0;
                    const hasMoveChanges = change.move_changes && change.move_changes.length > 0;
                    if (!hasStyleChanges && !hasTextChanges && !hasMoveChanges) {
                        console.warn(`\x1b[33m⚠ Skipping change with no style, text, or move changes.\x1b[0m`);
                        continue;
                    }
                    uniqueEncodedLocations.add(change.encoded_location);
                    validChanges.push(change);
                }
                catch (err) {
                    console.error(`\x1b[31m✖ Error processing change for location: ${change.encoded_location}\x1b[0m`, err);
                }
            }
            // Read each unique file exactly once, keyed by its encoded location.
            const fileContentMap = new Map();
            uniqueEncodedLocations.forEach((encodedLocation) => {
                try {
                    const { fileContent } = readFileFromEncodedLocation(encodedLocation);
                    fileContentMap.set(encodedLocation, fileContent);
                }
                catch (err) {
                    console.error(`\x1b[31m✖ Error reading file for location: ${encodedLocation}\x1b[0m`, err);
                }
            });
            console.log(`\x1b[36mℹ Pre-fetched ${fileContentMap.size} unique files for ${validChanges.length} changes\x1b[0m`);
            // Second pass: pair every valid change with its file content in the
            // shape the backend expects (one entry per change, not per file).
            const fileChangesForBackend = [];
            for (const change of validChanges) {
                try {
                    const fileContent = fileContentMap.get(change.encoded_location);
                    // NOTE(review): a truthiness check — an empty-string fileContent is
                    // also skipped here, not just a missing read; confirm intended.
                    if (!fileContent) {
                        console.warn(`\x1b[33m⚠ Skipping change with missing file content for: ${change.encoded_location}\x1b[0m`);
                        continue;
                    }
                    fileChangesForBackend.push({
                        encoded_location: change.encoded_location,
                        file_content: fileContent,
                        changes: [
                            {
                                style_changes: change.style_changes || [],
                                text_changes: change.text_changes || [],
                                move_changes: change.move_changes || [],
                            },
                        ],
                        browser_width: change.browser_width,
                        browser_height: change.browser_height,
                    });
                }
                catch (err) {
                    console.error(`\x1b[31m✖ Error processing change for location: ${change.encoded_location}\x1b[0m`, err);
                }
            }
            // Persist any attached screenshots (iterates the ORIGINAL list, so even
            // changes filtered out above still get their images saved).
            for (const change of changes) {
                if (change.image_data && change.filename) {
                    await saveImageData({
                        imageData: change.image_data,
                        filename: change.filename,
                    });
                }
            }
            if (fileChangesForBackend.length === 0) {
                return reply.code(200).send({
                    message: "No changes to apply.",
                    updatedFiles: [],
                });
            }
            console.log(`\x1b[36mℹ Sending request for ${fileChangesForBackend.length} individual changes (${changes.length} total original changes)\x1b[0m`);
            const backendChangesWithDimensions = fileChangesForBackend.filter((change) => change.browser_width && change.browser_height);
            if (backendChangesWithDimensions.length > 0) {
                console.log(`\x1b[36mℹ Sending browser dimensions to backend for ${backendChangesWithDimensions.length} changes\x1b[0m`);
            }
            // Backend round-trip (getChanges is defined earlier in this file).
            const backendResponse = await getChanges({
                githubRepoName: github_repo_name,
                fileChanges: fileChangesForBackend,
                authHeader,
            });
            // Apply returned full-file replacements; track which files changed.
            const updatedFiles = new Set();
            if (backendResponse === null || backendResponse === void 0 ? void 0 : backendResponse.updated_files) {
                console.log(`\x1b[36mℹ Processing updated_files format\x1b[0m`);
                const updatedEntries = Object.entries(backendResponse.updated_files);
                for (const [filePath, newContent] of updatedEntries) {
                    const targetFile = toAbsolutePath(filePath);
                    await applyFullFileReplacement(newContent, targetFile);
                    updatedFiles.add(targetFile);
                }
            }
            if (updatedFiles.size === 0) {
                return reply.code(200).send({
                    message: "No changes were applied.",
                    updatedFiles: [],
                });
            }
            return reply.code(200).send({
                message: `Changes applied successfully to ${updatedFiles.size} file(s). Processed ${changes.length} individual changes with preserved line number information.`,
                updatedFiles: Array.from(updatedFiles),
            });
        }
        catch (err) {
            console.error(`\x1b[31m✖ Fatal error in /visual-editor-api: ${err.message}\x1b[0m`);
            return reply.code(500).send({
                error: "An internal server error occurred",
                details: err.message,
            });
        }
    });
    // Visual editor API route for agent changes.
    // Delegates to the streaming handler; the response is an event stream
    // rather than a single JSON body.
    app.post("/visual-editor-api-agent", async (request, reply) => {
        try {
            const data = request.body;
            const { encoded_location, image_data, filename } = data;
            // NOTE(review): same redundant double-read of the authorization header
            // as in /visual-editor-api.
            const authHeader = request.headers.authorization || request.headers["authorization"];
            console.log(`\x1b[36mℹ [visual-editor-api-agent] Auth header received: ${authHeader ? "[PRESENT]" : "[MISSING]"}, Always streaming\x1b[0m`);
            console.log("[engine] /visual-editor-api-agent", {
                encoded_location,
                hasImage: !!image_data,
                filename,
            });
            if (!encoded_location) {
                return reply.code(400).send({ error: "Missing encoded_location" });
            }
            const { fileContent } = readFileFromEncodedLocation(encoded_location);
            // Save image data before processing
            await saveImageData({ imageData: image_data, filename });
            // Always use streaming for agent requests
            return await handleStreamingAgentRequest({
                reply,
                data,
                authHeader,
                fileContent,
            });
        }
        catch (err) {
            console.error(`Error in /visual-editor-api-agent: ${err.message}`);
            return reply.code(500).send({ error: err.message });
        }
    });
    // Endpoint to write files to local filesystem (for local mode).
    // Best-effort per file: a single failed write is logged and skipped, and
    // the response still reports 200 with whatever succeeded.
    app.post("/write-files", async (request, reply) => {
        try {
            const { updated_files } = request.body;
            console.log("[engine] /write-files incoming", {
                files: updated_files ? Object.keys(updated_files).length : 0,
            });
            if (!updated_files || typeof updated_files !== "object") {
                return reply.code(400).send({
                    error: "Missing or invalid updated_files object",
                });
            }
            const writtenFiles = [];
            const updatedEntries = Object.entries(updated_files);
            for (const [rawPath, newContent] of updatedEntries) {
                try {
                    // Strip temp-file suffixes ("foo.js.tmp.123" -> "foo.js") so the
                    // write lands on the real target path.
                    let filePath = rawPath;
                    if (filePath.includes(".tmp.")) {
                        filePath = filePath.slice(0, filePath.indexOf(".tmp."));
                    }
                    const targetFilePath = toAbsolutePath(filePath);
                    await applyFullFileReplacement(newContent, targetFilePath);
                    writtenFiles.push(targetFilePath);
                    console.log(`\x1b[32m✓ Wrote ${targetFilePath} to disk\x1b[0m`);
                }
                catch (writeErr) {
                    console.error(`\x1b[31m✗ Failed to write ${rawPath}: ${writeErr.message}\x1b[0m`);
                }
            }
            return reply.code(200).send({
                success: true,
                written_files: writtenFiles,
            });
        }
        catch (err) {
            console.error(`\x1b[31m✗ Error in /write-files: ${err.message}\x1b[0m`);
            return reply.code(500).send({ error: err.message });
        }
    });
    return app;
}
|
|
951
|
+
/**
 * Starts the Codepress development server if not already running.
 *
 * Guards, in order: never runs under NODE_ENV=production; reuses the
 * per-process instance; acquires a system-wide lock so only one server
 * runs across processes.
 * @param {Object} options Server configuration options
 * @param {number} [options.port=4321] Port to run the server on
 * @returns {Object|null} The Fastify instance, or null if the server should
 *   not (or could not) start in this process
 */
async function startServer(options = {}) {
    // Only run in development environment
    if (process.env.NODE_ENV === "production") {
        return null;
    }
    // Return existing instance if already running (per-process guard)
    if (serverInstance) {
        return serverInstance;
    }
    // Try to acquire lock to ensure only one server instance runs system-wide
    if (!acquireLock()) {
        return null;
    }
    // Prefer the caller-supplied port; fall back to the configured default.
    // Native `??` replaces the transpiled `var _a` null/undefined check and
    // keeps the same semantics (only null/undefined fall through).
    const port = options.port ?? getServerPort();
    try {
        // Create the Fastify app
        const app = createApp();
        // Ensure lock is released when Fastify closes
        app.addHook("onClose", () => {
            releaseLock();
        });
        // Start the server
        await app.listen({ port, host: "0.0.0.0" });
        console.log(`\x1b[32m✅ Codepress Dev Server running at http://localhost:${port}\x1b[0m`);
        // Save instance
        serverInstance = app;
        return app;
    }
    catch (err) {
        if (err.code === "EADDRINUSE") {
            console.log(`\x1b[33mℹ Codepress Dev Server: Port ${port} is already in use, server is likely already running\x1b[0m`);
        }
        else {
            console.error("Codepress Dev Server error:", err);
        }
        // On startup failure, best-effort release
        releaseLock();
        return null;
    }
}
|
|
999
|
+
/**
 * Check if a relative path should be excluded based on ignore patterns.
 * @param {string} relativePath - The path relative to the base directory.
 * @param {RegExp[]} excludePatterns - Compiled ignore patterns.
 * @returns {boolean} True if the path matches any exclude pattern.
 */
function shouldExclude(relativePath, excludePatterns) {
    // Short-circuit on the first pattern that matches.
    for (const ignoreRegex of excludePatterns) {
        if (ignoreRegex.test(relativePath)) {
            return true;
        }
    }
    return false;
}
|
|
1008
|
+
/**
 * Recursively collects file paths from a directory, respecting exclude patterns.
 * @param {string} dir - Directory to traverse.
 * @param {RegExp[]} excludePatterns - Compiled ignore patterns.
 * @param {string} [baseDir=dir] - Base directory used to compute relative paths.
 * @returns {string[]} A list of file paths relative to baseDir.
 */
function getFilesRecursively(dir, excludePatterns, baseDir = dir) {
    const collected = [];
    try {
        for (const dirent of fs.readdirSync(dir, { withFileTypes: true })) {
            const absoluteChild = path.join(dir, dirent.name);
            const relativeChild = path.relative(baseDir, absoluteChild);
            // Skip ignored entries before descending so excluded directory
            // subtrees are never read at all.
            if (shouldExclude(relativeChild, excludePatterns)) {
                continue;
            }
            if (dirent.isDirectory()) {
                // Depth-first descent; results stay relative to the original baseDir.
                collected.push(...getFilesRecursively(absoluteChild, excludePatterns, baseDir));
            }
            else if (dirent.isFile()) {
                collected.push(relativeChild);
            }
        }
    }
    catch (error) {
        // An unreadable directory is logged and skipped, never fatal.
        console.warn(`\x1b[33m⚠ Error reading directory ${dir}: ${error.message}\x1b[0m`);
    }
    return collected;
}
|
|
1038
|
+
/**
 * Get a list of files in the current project, respecting gitignore patterns.
 *
 * Reads .gitignore from process.cwd() (if present), converts each pattern to
 * a RegExp, walks the tree with getFilesRecursively, and returns the sorted
 * relative paths joined with newlines.
 * @returns {string} List of file paths, one per line (or an error sentinel string)
 */
function getProjectStructure() {
    try {
        // Read .gitignore patterns
        const gitignorePath = path.join(process.cwd(), ".gitignore");
        let excludePatterns = [
            /^\.git(\/.*)?$/, // Exclude .git directory by default
        ];
        if (fs.existsSync(gitignorePath)) {
            const gitignoreContent = fs.readFileSync(gitignorePath, "utf8");
            const gitignorePatterns = gitignoreContent
                .split("\n")
                .map((line) => line.trim())
                .filter((line) => line && !line.startsWith("#")) // Remove empty lines and comments
                .map((pattern) => {
                // Convert gitignore patterns to regex patterns
                let regexPattern = pattern;
                // Handle negation patterns (starting with !)
                if (pattern.startsWith("!")) {
                    // Skip negation patterns for now as they're complex to implement
                    return null;
                }
                // Remove leading slash if present (gitignore treats /pattern as root-relative)
                if (regexPattern.startsWith("/")) {
                    regexPattern = regexPattern.substring(1);
                }
                // Remove trailing slash for directories
                if (regexPattern.endsWith("/")) {
                    regexPattern = regexPattern.substring(0, regexPattern.length - 1);
                }
                // Escape special regex characters except * and ?
                regexPattern = regexPattern
                    .replace(/\./g, "\\.") // Escape dots
                    .replace(/\+/g, "\\+") // Escape plus
                    .replace(/\^/g, "\\^") // Escape caret
                    .replace(/\$/g, "\\$") // Escape dollar
                    .replace(/\(/g, "\\(") // Escape parentheses
                    .replace(/\)/g, "\\)")
                    .replace(/\[/g, "\\[") // Escape brackets
                    .replace(/\]/g, "\\]")
                    .replace(/\{/g, "\\{") // Escape braces
                    .replace(/\}/g, "\\}")
                    .replace(/\|/g, "\\|"); // Escape pipe
                // Convert gitignore wildcards to regex.
                // BUGFIX: the previous chain replaced "**" with ".*" first and then
                // ran the single-"*" rule over the result, which rewrote the "*"
                // inside ".*" to produce ".[^/]*" — so "**" never matched across
                // directory separators. Use a placeholder (NUL cannot appear in a
                // gitignore line) so the single-star rule can't corrupt it.
                regexPattern = regexPattern
                    .replace(/\*\*/g, "\u0000") // ** -> placeholder
                    .replace(/\*/g, "[^/]*") // * matches anything except path separator
                    .replace(/\?/g, "[^/]") // ? matches single character except path separator
                    .replace(/\u0000/g, ".*"); // placeholder -> any number of directories
                // Create regex pattern for matching file paths
                if (!regexPattern.includes("/")) {
                    // If no slash, match files/directories at any level
                    regexPattern = `(^|/)${regexPattern}(/.*)?$`;
                }
                else {
                    // If contains slash, match from start
                    regexPattern = `^${regexPattern}(/.*)?$`;
                }
                try {
                    return new RegExp(regexPattern);
                }
                catch (error) {
                    console.warn(`\x1b[33m⚠ Invalid regex pattern for "${pattern}": ${error.message}\x1b[0m`);
                    return null;
                }
            })
                .filter((regex) => regex !== null); // Remove null entries with type guard
            // Combine default patterns with gitignore patterns
            excludePatterns = [...excludePatterns, ...gitignorePatterns];
            console.log(`\x1b[36mℹ Found ${gitignorePatterns.length} valid gitignore patterns\x1b[0m`);
        }
        else {
            console.log(`\x1b[33m⚠ No .gitignore file found, no exclusions applied\x1b[0m`);
        }
        const fileList = getFilesRecursively(process.cwd(), excludePatterns);
        console.log(`\x1b[36mℹ Generated file list with ${fileList.length} files\x1b[0m`);
        // Return as a formatted string with one file per line
        return fileList.sort().join("\n");
    }
    catch (error) {
        console.error(`Error generating project structure: ${error.message}`);
        return "Unable to generate project structure";
    }
}
|
|
1124
|
+
// Public surface of this module: the server lifecycle entry point plus
// helpers that are useful on their own (app factory, project-structure dump).
const serverModule = {
    startServer,
    createApp,
    getProjectStructure,
};
// Auto-start on import outside production. The async IIFE keeps module
// evaluation synchronous; once startup settles, the instance (or null — see
// startServer's guards) is attached as `serverModule.server`, so consumers
// importing early may observe it as undefined.
if (process.env.NODE_ENV !== "production") {
    (async () => {
        try {
            serverModule.server = await startServer();
        }
        catch (err) {
            // Best-effort: a failed auto-start must not crash the importing app.
            console.error("Failed to auto-start server:", err);
        }
    })();
}
module.exports = serverModule;
//# sourceMappingURL=server.js.map
|