@getjack/jack 0.1.16 → 0.1.17

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,346 @@
1
+ /**
2
+ * SQL Risk Classification
3
+ *
4
+ * Classifies SQL statements by risk level to enable security guardrails:
5
+ * - read: SELECT, EXPLAIN, PRAGMA (read-only) - safe to run
6
+ * - write: INSERT, UPDATE, DELETE (with WHERE) - requires --write flag
7
+ * - destructive: DROP, TRUNCATE, DELETE (no WHERE), ALTER - requires --write + confirmation
8
+ *
9
+ * Uses simple regex-based classification since D1 uses SQLite with a limited SQL surface.
10
+ */
11
+
12
/** Risk tier assigned to a SQL statement; ordered read < write < destructive. */
export type RiskLevel = "read" | "write" | "destructive";

/** A single SQL statement paired with its classification result. */
export interface ClassifiedStatement {
  sql: string; // Original statement text, trimmed (comments preserved)
  risk: RiskLevel;
  operation: string; // SELECT, INSERT, DROP, etc.
}
19
+
20
/**
 * Regex patterns for detecting SQL operations by their leading keyword.
 * Order matters for operations that might overlap: classifyStatement tries
 * destructive before write and write before read, and pragma_write must be
 * tested before pragma_read (both start with PRAGMA).
 */
const SQL_PATTERNS = {
  // Read operations (safe)
  select: /^\s*SELECT\b/i,
  explain: /^\s*EXPLAIN\b/i,
  pragma_read: /^\s*PRAGMA\s+\w+\s*(?:;?\s*$|\()/i, // PRAGMA table_info(...) or PRAGMA journal_mode;
  // CTE (WITH) - handled specially in classifyStatement based on actual operation

  // Destructive operations (dangerous - require confirmation)
  drop: /^\s*DROP\b/i,
  // NOTE(review): TRUNCATE is not SQLite syntax; presumably kept defensively — confirm
  truncate: /^\s*TRUNCATE\b/i,
  alter: /^\s*ALTER\b/i,

  // Write operations (require --write flag)
  insert: /^\s*INSERT\b/i,
  update: /^\s*UPDATE\b/i,
  delete: /^\s*DELETE\b/i,
  replace: /^\s*REPLACE\b/i,
  create: /^\s*CREATE\b/i, // schema creation is treated as a write, not destructive
  pragma_write: /^\s*PRAGMA\s+\w+\s*=\s*/i, // PRAGMA setting = value

  // CTE pattern (just to detect WITH statements)
  with_cte: /^\s*WITH\b/i,
};
47
+
48
+ /**
49
+ * Strip comments from SQL for classification purposes.
50
+ * Preserves the actual SQL content after comments.
51
+ */
52
+ function stripComments(sql: string): string {
53
+ return sql
54
+ .replace(/--.*$/gm, "") // Single line comments
55
+ .replace(/\/\*[\s\S]*?\*\//g, "") // Multi-line comments
56
+ .trim();
57
+ }
58
+
59
+ /**
60
+ * Check if a DELETE statement has a WHERE clause.
61
+ * DELETE without WHERE is destructive (deletes all rows).
62
+ */
63
+ function isDeleteWithoutWhere(sql: string): boolean {
64
+ // Remove comments and normalize whitespace
65
+ const cleaned = sql
66
+ .replace(/--.*$/gm, "") // Single line comments
67
+ .replace(/\/\*[\s\S]*?\*\//g, "") // Multi-line comments
68
+ .replace(/\s+/g, " ")
69
+ .trim();
70
+
71
+ // Check if it's a DELETE statement
72
+ if (!SQL_PATTERNS.delete.test(cleaned)) {
73
+ return false;
74
+ }
75
+
76
+ // Check for WHERE clause (case-insensitive)
77
+ // Match WHERE followed by anything (column name, space, etc.)
78
+ const hasWhere = /\bWHERE\b/i.test(cleaned);
79
+
80
+ return !hasWhere;
81
+ }
82
+
83
+ /**
84
+ * Extract the primary operation from a SQL statement.
85
+ * For CTEs (WITH clauses), finds the actual operation after the CTE definitions.
86
+ */
87
+ function extractOperation(sql: string): string {
88
+ const cleaned = sql.trim().toUpperCase();
89
+
90
+ // Handle WITH clauses (CTEs)
91
+ // The actual operation comes after all the CTE definitions end
92
+ // Pattern: WITH name AS (...), name2 AS (...) <ACTUAL_OPERATION>
93
+ if (cleaned.startsWith("WITH")) {
94
+ // Find the main operation by looking for DML keyword after CTE definitions close
95
+ // CTE definitions are enclosed in parentheses, so find the operation after the
96
+ // last `)` that matches the CTE pattern
97
+ // Look for: ) followed by optional whitespace then SELECT/INSERT/UPDATE/DELETE
98
+ const cteEndMatch = cleaned.match(/\)\s*(SELECT|INSERT|UPDATE|DELETE)\b/i);
99
+ if (cteEndMatch) {
100
+ return cteEndMatch[1].toUpperCase();
101
+ }
102
+
103
+ // Fallback: if no match, look for any of these operations
104
+ // (handles malformed or edge cases)
105
+ if (/\bDELETE\b/.test(cleaned)) return "DELETE";
106
+ if (/\bUPDATE\b/.test(cleaned)) return "UPDATE";
107
+ if (/\bINSERT\b/.test(cleaned)) return "INSERT";
108
+ if (/\bSELECT\b/.test(cleaned)) return "SELECT";
109
+
110
+ return "WITH";
111
+ }
112
+
113
+ // Extract first keyword
114
+ const match = cleaned.match(/^\s*(\w+)/);
115
+ return match?.[1] ?? "UNKNOWN";
116
+ }
117
+
118
/**
 * Classify a single SQL statement by risk level.
 *
 * @param sql - One SQL statement. Comments are ignored for classification
 *   but preserved in the returned `sql`.
 * @returns The trimmed statement, its risk tier, and the operation keyword.
 */
export function classifyStatement(sql: string): ClassifiedStatement {
  const trimmed = sql.trim();
  // Strip comments for classification but preserve original SQL
  const cleaned = stripComments(trimmed);
  const operation = extractOperation(cleaned);

  // Handle CTE (WITH) statements based on their actual operation.
  // This must come early because CTEs don't start with the operation keyword.
  if (SQL_PATTERNS.with_cte.test(cleaned)) {
    switch (operation) {
      case "DELETE":
        // DELETE without WHERE wipes the table -> destructive. For CTEs we
        // look for DELETE...WHERE anywhere after the CTE definitions.
        // NOTE(review): a WHERE inside a string literal would also match —
        // accepted limitation of the regex approach.
        if (!/\bDELETE\b[^;]*\bWHERE\b/i.test(cleaned)) {
          return { sql: trimmed, risk: "destructive", operation: "DELETE" };
        }
        return { sql: trimmed, risk: "write", operation: "DELETE" };
      case "INSERT":
        return { sql: trimmed, risk: "write", operation: "INSERT" };
      case "UPDATE":
        return { sql: trimmed, risk: "write", operation: "UPDATE" };
      case "SELECT":
        return { sql: trimmed, risk: "read", operation: "SELECT" };
      default:
        // Unknown CTE operation - default to write for safety
        return { sql: trimmed, risk: "write", operation };
    }
  }

  // Check destructive operations first (highest risk); order of these
  // checks encodes the risk priority and must not be rearranged.
  if (SQL_PATTERNS.drop.test(cleaned)) {
    return { sql: trimmed, risk: "destructive", operation: "DROP" };
  }

  if (SQL_PATTERNS.truncate.test(cleaned)) {
    return { sql: trimmed, risk: "destructive", operation: "TRUNCATE" };
  }

  if (SQL_PATTERNS.alter.test(cleaned)) {
    return { sql: trimmed, risk: "destructive", operation: "ALTER" };
  }

  // DELETE without WHERE is destructive
  if (isDeleteWithoutWhere(cleaned)) {
    return { sql: trimmed, risk: "destructive", operation: "DELETE" };
  }

  // Check write operations
  if (SQL_PATTERNS.insert.test(cleaned)) {
    return { sql: trimmed, risk: "write", operation: "INSERT" };
  }

  if (SQL_PATTERNS.update.test(cleaned)) {
    return { sql: trimmed, risk: "write", operation: "UPDATE" };
  }

  if (SQL_PATTERNS.delete.test(cleaned)) {
    // Has a WHERE clause (the no-WHERE case returned destructive above)
    return { sql: trimmed, risk: "write", operation: "DELETE" };
  }

  if (SQL_PATTERNS.replace.test(cleaned)) {
    return { sql: trimmed, risk: "write", operation: "REPLACE" };
  }

  if (SQL_PATTERNS.create.test(cleaned)) {
    return { sql: trimmed, risk: "write", operation: "CREATE" };
  }

  // pragma_write must be tested before pragma_read (both begin with PRAGMA)
  if (SQL_PATTERNS.pragma_write.test(cleaned)) {
    return { sql: trimmed, risk: "write", operation: "PRAGMA" };
  }

  // Check read operations
  if (SQL_PATTERNS.select.test(cleaned)) {
    return { sql: trimmed, risk: "read", operation: "SELECT" };
  }

  if (SQL_PATTERNS.explain.test(cleaned)) {
    return { sql: trimmed, risk: "read", operation: "EXPLAIN" };
  }

  if (SQL_PATTERNS.pragma_read.test(cleaned)) {
    return { sql: trimmed, risk: "read", operation: "PRAGMA" };
  }

  // Unknown operations default to write for safety
  return { sql: trimmed, risk: "write", operation };
}
211
+
212
+ /**
213
+ * Split SQL into individual statements.
214
+ * Handles semicolons inside strings and comments.
215
+ */
216
+ export function splitStatements(sql: string): string[] {
217
+ const statements: string[] = [];
218
+ let current = "";
219
+ let inString: string | null = null;
220
+ let inComment = false;
221
+ let inMultilineComment = false;
222
+
223
+ for (let i = 0; i < sql.length; i++) {
224
+ const char = sql[i];
225
+ const nextChar = sql[i + 1];
226
+
227
+ // Handle multiline comments
228
+ if (!inString && !inComment && char === "/" && nextChar === "*") {
229
+ inMultilineComment = true;
230
+ current += char;
231
+ continue;
232
+ }
233
+
234
+ if (inMultilineComment && char === "*" && nextChar === "/") {
235
+ current += "*/";
236
+ i++; // Skip the /
237
+ inMultilineComment = false;
238
+ continue;
239
+ }
240
+
241
+ if (inMultilineComment) {
242
+ current += char;
243
+ continue;
244
+ }
245
+
246
+ // Handle single-line comments
247
+ if (!inString && char === "-" && nextChar === "-") {
248
+ inComment = true;
249
+ current += char;
250
+ continue;
251
+ }
252
+
253
+ if (inComment && char === "\n") {
254
+ inComment = false;
255
+ current += char;
256
+ continue;
257
+ }
258
+
259
+ if (inComment) {
260
+ current += char;
261
+ continue;
262
+ }
263
+
264
+ // Handle strings
265
+ if (!inString && (char === "'" || char === '"')) {
266
+ inString = char;
267
+ current += char;
268
+ continue;
269
+ }
270
+
271
+ if (inString === char) {
272
+ // Check for escaped quote
273
+ if (nextChar === char) {
274
+ current += char + char;
275
+ i++; // Skip the escaped quote
276
+ continue;
277
+ }
278
+ inString = null;
279
+ current += char;
280
+ continue;
281
+ }
282
+
283
+ if (inString) {
284
+ current += char;
285
+ continue;
286
+ }
287
+
288
+ // Handle statement terminator
289
+ if (char === ";") {
290
+ const trimmed = current.trim();
291
+ if (trimmed) {
292
+ statements.push(trimmed);
293
+ }
294
+ current = "";
295
+ continue;
296
+ }
297
+
298
+ current += char;
299
+ }
300
+
301
+ // Add final statement if present
302
+ const trimmed = current.trim();
303
+ if (trimmed) {
304
+ statements.push(trimmed);
305
+ }
306
+
307
+ return statements;
308
+ }
309
+
310
+ /**
311
+ * Classify multiple SQL statements and return the highest risk level.
312
+ */
313
+ export function classifyStatements(sql: string): {
314
+ statements: ClassifiedStatement[];
315
+ highestRisk: RiskLevel;
316
+ } {
317
+ const statements = splitStatements(sql).map(classifyStatement);
318
+
319
+ // Find highest risk level
320
+ let highestRisk: RiskLevel = "read";
321
+ for (const stmt of statements) {
322
+ if (stmt.risk === "destructive") {
323
+ highestRisk = "destructive";
324
+ break;
325
+ }
326
+ if (stmt.risk === "write" && highestRisk === "read") {
327
+ highestRisk = "write";
328
+ }
329
+ }
330
+
331
+ return { statements, highestRisk };
332
+ }
333
+
334
+ /**
335
+ * Get a human-readable description of the risk level.
336
+ */
337
+ export function getRiskDescription(risk: RiskLevel): string {
338
+ switch (risk) {
339
+ case "read":
340
+ return "Read-only query";
341
+ case "write":
342
+ return "Write operation (modifies data)";
343
+ case "destructive":
344
+ return "Destructive operation (may cause data loss)";
345
+ }
346
+ }
@@ -43,8 +43,12 @@ export const DEFAULT_EXCLUDES: string[] = [
43
43
  ".git/**",
44
44
  ".env",
45
45
  ".env.*",
46
+ "*.env",
47
+ ".envrc",
46
48
  ".dev.vars",
47
49
  ".secrets.json",
50
+ "secrets.yaml",
51
+ "secrets.yml",
48
52
  "*.log",
49
53
  ".DS_Store",
50
54
  "dist/**",
@@ -33,6 +33,9 @@ export const Events = {
33
33
  AUTO_DETECT_SUCCESS: "auto_detect_success",
34
34
  AUTO_DETECT_FAILED: "auto_detect_failed",
35
35
  AUTO_DETECT_REJECTED: "auto_detect_rejected",
36
+ // Service events
37
+ SERVICE_CREATED: "service_created",
38
+ SQL_EXECUTED: "sql_executed",
36
39
  } as const;
37
40
 
38
41
  type EventName = (typeof Events)[keyof typeof Events];
@@ -0,0 +1,322 @@
1
+ /**
2
+ * Unit tests for wrangler-config.ts
3
+ *
4
+ * Tests adding D1 bindings to wrangler.jsonc while preserving comments.
5
+ */
6
+
7
+ import { afterEach, beforeEach, describe, expect, it } from "bun:test";
8
+ import { existsSync, mkdirSync, rmSync, writeFileSync } from "node:fs";
9
+ import { join } from "node:path";
10
+ import { tmpdir } from "node:os";
11
+ import { addD1Binding, getExistingD1Bindings, type D1BindingConfig } from "./wrangler-config.ts";
12
+
13
+ // ============================================================================
14
+ // Test Helpers
15
+ // ============================================================================
16
+
17
+ let testDir: string;
18
+
19
// Writes `content` to a fresh wrangler.jsonc inside the per-test temp dir
// and returns its absolute path. Relies on module-level `testDir`, which
// beforeEach points at a unique temporary directory.
function createTestConfig(content: string): string {
  const configPath = join(testDir, "wrangler.jsonc");
  writeFileSync(configPath, content);
  return configPath;
}
24
+
25
// Reads the config file back as text (uses the Bun runtime file API).
async function readTestConfig(configPath: string): Promise<string> {
  return await Bun.file(configPath).text();
}
28
+
29
+ // ============================================================================
30
+ // Tests
31
+ // ============================================================================
32
+
33
+ describe("wrangler-config", () => {
34
+ beforeEach(() => {
35
+ testDir = join(tmpdir(), `wrangler-config-test-${Date.now()}`);
36
+ mkdirSync(testDir, { recursive: true });
37
+ });
38
+
39
+ afterEach(() => {
40
+ if (existsSync(testDir)) {
41
+ rmSync(testDir, { recursive: true });
42
+ }
43
+ });
44
+
45
+ describe("getExistingD1Bindings", () => {
46
+ it("returns empty array when no d1_databases", async () => {
47
+ const configPath = createTestConfig(`{
48
+ "name": "test-app",
49
+ "main": "src/index.ts"
50
+ }`);
51
+
52
+ const bindings = await getExistingD1Bindings(configPath);
53
+
54
+ expect(bindings).toHaveLength(0);
55
+ });
56
+
57
+ it("returns existing D1 bindings", async () => {
58
+ const configPath = createTestConfig(`{
59
+ "name": "test-app",
60
+ "d1_databases": [
61
+ {
62
+ "binding": "DB",
63
+ "database_name": "my-db",
64
+ "database_id": "abc-123"
65
+ }
66
+ ]
67
+ }`);
68
+
69
+ const bindings = await getExistingD1Bindings(configPath);
70
+
71
+ expect(bindings).toHaveLength(1);
72
+ expect(bindings[0]).toEqual({
73
+ binding: "DB",
74
+ database_name: "my-db",
75
+ database_id: "abc-123",
76
+ });
77
+ });
78
+
79
+ it("returns multiple D1 bindings", async () => {
80
+ const configPath = createTestConfig(`{
81
+ "name": "test-app",
82
+ "d1_databases": [
83
+ {
84
+ "binding": "DB",
85
+ "database_name": "main-db",
86
+ "database_id": "abc-123"
87
+ },
88
+ {
89
+ "binding": "ANALYTICS_DB",
90
+ "database_name": "analytics-db",
91
+ "database_id": "def-456"
92
+ }
93
+ ]
94
+ }`);
95
+
96
+ const bindings = await getExistingD1Bindings(configPath);
97
+
98
+ expect(bindings).toHaveLength(2);
99
+ expect(bindings[0]?.binding).toBe("DB");
100
+ expect(bindings[1]?.binding).toBe("ANALYTICS_DB");
101
+ });
102
+
103
+ it("filters out incomplete bindings", async () => {
104
+ const configPath = createTestConfig(`{
105
+ "name": "test-app",
106
+ "d1_databases": [
107
+ {
108
+ "binding": "DB",
109
+ "database_name": "my-db"
110
+ }
111
+ ]
112
+ }`);
113
+
114
+ const bindings = await getExistingD1Bindings(configPath);
115
+
116
+ expect(bindings).toHaveLength(0);
117
+ });
118
+
119
+ it("throws error when config file does not exist", async () => {
120
+ const configPath = join(testDir, "nonexistent.jsonc");
121
+
122
+ expect(getExistingD1Bindings(configPath)).rejects.toThrow("wrangler.jsonc not found");
123
+ });
124
+
125
+ it("handles JSONC with comments", async () => {
126
+ const configPath = createTestConfig(`{
127
+ "name": "test-app",
128
+ // Database configuration
129
+ "d1_databases": [
130
+ {
131
+ "binding": "DB",
132
+ "database_name": "my-db", // main database
133
+ "database_id": "abc-123"
134
+ }
135
+ ]
136
+ }`);
137
+
138
+ const bindings = await getExistingD1Bindings(configPath);
139
+
140
+ expect(bindings).toHaveLength(1);
141
+ expect(bindings[0]?.binding).toBe("DB");
142
+ });
143
+ });
144
+
145
+ describe("addD1Binding", () => {
146
+ const testBinding: D1BindingConfig = {
147
+ binding: "DB",
148
+ database_name: "test-db",
149
+ database_id: "abc-123-def-456",
150
+ };
151
+
152
+ it("throws error when config file does not exist", async () => {
153
+ const configPath = join(testDir, "nonexistent.jsonc");
154
+
155
+ expect(addD1Binding(configPath, testBinding)).rejects.toThrow("wrangler.jsonc not found");
156
+ });
157
+
158
+ it("adds d1_databases section when it does not exist", async () => {
159
+ const configPath = createTestConfig(`{
160
+ "name": "test-app",
161
+ "main": "src/index.ts"
162
+ }`);
163
+
164
+ await addD1Binding(configPath, testBinding);
165
+
166
+ const content = await readTestConfig(configPath);
167
+ expect(content).toContain('"d1_databases"');
168
+ expect(content).toContain('"binding": "DB"');
169
+ expect(content).toContain('"database_name": "test-db"');
170
+ expect(content).toContain('"database_id": "abc-123-def-456"');
171
+ });
172
+
173
+ it("appends to existing d1_databases array", async () => {
174
+ const configPath = createTestConfig(`{
175
+ "name": "test-app",
176
+ "d1_databases": [
177
+ {
178
+ "binding": "MAIN_DB",
179
+ "database_name": "main-db",
180
+ "database_id": "existing-id"
181
+ }
182
+ ]
183
+ }`);
184
+
185
+ await addD1Binding(configPath, {
186
+ binding: "SECONDARY_DB",
187
+ database_name: "secondary-db",
188
+ database_id: "new-id",
189
+ });
190
+
191
+ const content = await readTestConfig(configPath);
192
+ expect(content).toContain('"binding": "MAIN_DB"');
193
+ expect(content).toContain('"binding": "SECONDARY_DB"');
194
+ });
195
+
196
+ it("preserves comments when adding d1_databases section", async () => {
197
+ const configPath = createTestConfig(`{
198
+ "name": "test-app",
199
+ // This is the main entry point
200
+ "main": "src/index.ts"
201
+ }`);
202
+
203
+ await addD1Binding(configPath, testBinding);
204
+
205
+ const content = await readTestConfig(configPath);
206
+ expect(content).toContain("// This is the main entry point");
207
+ expect(content).toContain('"d1_databases"');
208
+ });
209
+
210
+ it("preserves comments when appending to existing array", async () => {
211
+ const configPath = createTestConfig(`{
212
+ "name": "test-app",
213
+ // Database configuration
214
+ "d1_databases": [
215
+ {
216
+ "binding": "MAIN_DB",
217
+ "database_name": "main-db", // Primary database
218
+ "database_id": "existing-id"
219
+ }
220
+ ]
221
+ }`);
222
+
223
+ await addD1Binding(configPath, {
224
+ binding: "SECONDARY_DB",
225
+ database_name: "secondary-db",
226
+ database_id: "new-id",
227
+ });
228
+
229
+ const content = await readTestConfig(configPath);
230
+ expect(content).toContain("// Database configuration");
231
+ expect(content).toContain("// Primary database");
232
+ expect(content).toContain('"binding": "SECONDARY_DB"');
233
+ });
234
+
235
+ it("handles empty d1_databases array", async () => {
236
+ const configPath = createTestConfig(`{
237
+ "name": "test-app",
238
+ "d1_databases": []
239
+ }`);
240
+
241
+ await addD1Binding(configPath, testBinding);
242
+
243
+ const content = await readTestConfig(configPath);
244
+ expect(content).toContain('"binding": "DB"');
245
+ });
246
+
247
+ it("handles real-world miniapp template format", async () => {
248
+ const configPath = createTestConfig(`{
249
+ "name": "jack-template",
250
+ "main": "src/worker.ts",
251
+ "compatibility_date": "2024-12-01",
252
+ "assets": {
253
+ "binding": "ASSETS",
254
+ "directory": "dist/client",
255
+ "not_found_handling": "single-page-application",
256
+ // Required for dynamic routes (/share, /api/og) to work alongside static assets
257
+ // Without this, Cloudflare serves static files directly, bypassing the worker
258
+ "run_worker_first": true
259
+ },
260
+ "d1_databases": [
261
+ {
262
+ "binding": "DB",
263
+ "database_name": "jack-template-db"
264
+ }
265
+ ],
266
+ "ai": {
267
+ "binding": "AI"
268
+ },
269
+ "vars": {
270
+ // Set this after first deploy - required for share embeds
271
+ // Get your URL from: jack projects or wrangler deployments list
272
+ // Example: "APP_URL": "https://my-app.username.workers.dev"
273
+ "APP_URL": ""
274
+ }
275
+ }`);
276
+
277
+ await addD1Binding(configPath, {
278
+ binding: "ANALYTICS_DB",
279
+ database_name: "analytics-db",
280
+ database_id: "analytics-uuid",
281
+ });
282
+
283
+ const content = await readTestConfig(configPath);
284
+ // Verify original comments preserved
285
+ expect(content).toContain("// Required for dynamic routes");
286
+ expect(content).toContain("// Set this after first deploy");
287
+ // Verify new binding added
288
+ expect(content).toContain('"binding": "ANALYTICS_DB"');
289
+ expect(content).toContain('"database_name": "analytics-db"');
290
+ });
291
+
292
+ it("produces valid JSON output", async () => {
293
+ const configPath = createTestConfig(`{
294
+ "name": "test-app",
295
+ "main": "src/index.ts"
296
+ }`);
297
+
298
+ await addD1Binding(configPath, testBinding);
299
+
300
+ const content = await readTestConfig(configPath);
301
+ // Strip comments and parse
302
+ const { parseJsonc } = await import("./jsonc.ts");
303
+ const parsed = parseJsonc<{ d1_databases: D1BindingConfig[] }>(content);
304
+ expect(parsed.d1_databases).toBeDefined();
305
+ expect(parsed.d1_databases[0]?.binding).toBe("DB");
306
+ });
307
+
308
+ it("handles config with trailing comma", async () => {
309
+ const configPath = createTestConfig(`{
310
+ "name": "test-app",
311
+ "main": "src/index.ts",
312
+ }`);
313
+
314
+ await addD1Binding(configPath, testBinding);
315
+
316
+ const content = await readTestConfig(configPath);
317
+ const { parseJsonc } = await import("./jsonc.ts");
318
+ const parsed = parseJsonc<{ d1_databases: D1BindingConfig[] }>(content);
319
+ expect(parsed.d1_databases).toBeDefined();
320
+ });
321
+ });
322
+ });