@camox/api 0.2.0-alpha.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,463 @@
1
+ import { ORPCError } from "@orpc/server";
2
+ import { chat } from "@tanstack/ai";
3
+ import { createOpenRouterText } from "@tanstack/ai-openrouter";
4
+ import { and, eq, inArray, sql } from "drizzle-orm";
5
+ import { Hono } from "hono";
6
+ import { outdent } from "outdent";
7
+ import { z } from "zod";
8
+
9
+ import { assertFileAccess, getAuthorizedProject } from "../authorization";
10
+ import type { Database } from "../db";
11
+ import { broadcastInvalidation } from "../lib/broadcast-invalidation";
12
+ import { queryKeys } from "../lib/query-keys";
13
+ import { resolveEnvironment } from "../lib/resolve-environment";
14
+ import { scheduleAiJob } from "../lib/schedule-ai-job";
15
+ import { pub, authed } from "../orpc";
16
+ import { blocks, files, member, organizationTable, projects, repeatableItems } from "../schema";
17
+ import type { AppEnv } from "../types";
18
+
19
+ // --- AI Executor ---
20
+
21
+ async function generateImageMetadata(apiKey: string, imageUrl: string, currentFilename: string) {
22
+ // Fetch image server-side — the AI provider can't reach localhost URLs in development
23
+ const response = await fetch(imageUrl);
24
+ const buffer = await response.arrayBuffer();
25
+ const bytes = new Uint8Array(buffer);
26
+ let binary = "";
27
+ for (let i = 0; i < bytes.length; i++) binary += String.fromCharCode(bytes[i]);
28
+ const base64 = btoa(binary);
29
+ const mimeType = response.headers.get("content-type") || "image/jpeg";
30
+
31
+ return await chat({
32
+ adapter: createOpenRouterText("google/gemini-2.5-flash-lite", apiKey),
33
+ outputSchema: z.object({
34
+ filename: z.string(),
35
+ alt: z.string(),
36
+ }),
37
+ messages: [
38
+ {
39
+ role: "user",
40
+ content: [
41
+ {
42
+ type: "image" as const,
43
+ source: { type: "data" as const, value: base64, mimeType: mimeType as "image/png" },
44
+ },
45
+ {
46
+ type: "text" as const,
47
+ content: outdent`
48
+ Analyze this image and generate metadata for it:
49
+ - "filename": a clean, descriptive filename in kebab-case (no extension). The current filename is "${currentFilename}". If it's already human-readable and descriptive, keep it as-is (without the extension). Only rewrite it if it's gibberish, a random hash, or not meaningful (e.g. "IMG_2847", "DSC0042", "a7f3b2c9").
50
+ - "alt": SEO-optimized alt text describing the image content. Be concise but descriptive (1 sentence max).
51
+ `,
52
+ },
53
+ ],
54
+ },
55
+ ],
56
+ });
57
+ }
58
+
59
+ export async function executeFileMetadata(db: Database, apiKey: string, fileId: number) {
60
+ const file = await db.select().from(files).where(eq(files.id, fileId)).get();
61
+ if (!file || file.aiMetadataEnabled === false) return;
62
+
63
+ const metadata = await generateImageMetadata(apiKey, file.url, file.filename);
64
+
65
+ await db
66
+ .update(files)
67
+ .set({ filename: metadata.filename, alt: metadata.alt, updatedAt: Date.now() })
68
+ .where(eq(files.id, fileId));
69
+ }
70
+
71
+ // --- File reference cleanup ---
72
+
73
+ type JsonValue = string | number | boolean | null | JsonValue[] | { [key: string]: JsonValue };
74
+
75
+ function cleanFileReferences(value: JsonValue, fileId: number): JsonValue {
76
+ if (value === null || typeof value !== "object") return value;
77
+
78
+ if (Array.isArray(value)) {
79
+ return value
80
+ .filter((entry) => !containsFileRef(entry, fileId))
81
+ .map((entry) => cleanFileReferences(entry, fileId) as JsonValue);
82
+ }
83
+
84
+ // Object with _fileId — direct file reference
85
+ if ("_fileId" in value && value._fileId === fileId) return null;
86
+
87
+ // Recurse into object properties
88
+ const cleaned: Record<string, JsonValue> = {};
89
+ for (const [k, v] of Object.entries(value)) {
90
+ cleaned[k] = cleanFileReferences(v as JsonValue, fileId);
91
+ }
92
+ return cleaned;
93
+ }
94
+
95
+ function containsFileRef(value: JsonValue, fileId: number): boolean {
96
+ if (value === null || typeof value !== "object") return false;
97
+ if (Array.isArray(value)) return value.some((v) => containsFileRef(v, fileId));
98
+ if ("_fileId" in value && value._fileId === fileId) return true;
99
+ return Object.values(value).some((v) => containsFileRef(v as JsonValue, fileId));
100
+ }
101
+
102
+ async function removeFileReferences(db: Database, fileId: number) {
103
+ const marker = `"_fileId":${fileId}`;
104
+ const now = Date.now();
105
+
106
+ const affectedBlocks = await db
107
+ .select({ id: blocks.id, content: blocks.content, pageId: blocks.pageId })
108
+ .from(blocks)
109
+ .where(sql`INSTR(${blocks.content}, ${marker}) > 0`);
110
+
111
+ const affectedItems = await db
112
+ .select({
113
+ id: repeatableItems.id,
114
+ content: repeatableItems.content,
115
+ blockId: repeatableItems.blockId,
116
+ })
117
+ .from(repeatableItems)
118
+ .where(sql`INSTR(${repeatableItems.content}, ${marker}) > 0`);
119
+
120
+ for (const block of affectedBlocks) {
121
+ const cleaned = cleanFileReferences(block.content as JsonValue, fileId);
122
+ await db
123
+ .update(blocks)
124
+ .set({ content: cleaned, updatedAt: now })
125
+ .where(eq(blocks.id, block.id));
126
+ }
127
+
128
+ for (const item of affectedItems) {
129
+ const cleaned = cleanFileReferences(item.content as JsonValue, fileId);
130
+ await db
131
+ .update(repeatableItems)
132
+ .set({ content: cleaned, updatedAt: now })
133
+ .where(eq(repeatableItems.id, item.id));
134
+ }
135
+
136
+ const itemBlockIds = affectedItems.map((i) => i.blockId);
137
+ const allBlockIds = [...new Set([...affectedBlocks.map((b) => b.id), ...itemBlockIds])];
138
+
139
+ // Look up pageIds for blocks referenced by affected repeatable items
140
+ let itemBlockPageIds: number[] = [];
141
+ if (itemBlockIds.length > 0) {
142
+ const uniqueItemBlockIds = [...new Set(itemBlockIds)];
143
+ const parentBlocks = await db
144
+ .select({ id: blocks.id, pageId: blocks.pageId })
145
+ .from(blocks)
146
+ .where(inArray(blocks.id, uniqueItemBlockIds));
147
+ itemBlockPageIds = parentBlocks.map((b) => b.pageId).filter((id) => id != null);
148
+ }
149
+
150
+ const allPageIds = [
151
+ ...new Set([
152
+ ...affectedBlocks.map((b) => b.pageId).filter((id) => id != null),
153
+ ...itemBlockPageIds,
154
+ ]),
155
+ ];
156
+
157
+ return {
158
+ blockIds: allBlockIds,
159
+ blockPageIds: allPageIds,
160
+ itemIds: affectedItems.map((i) => i.id),
161
+ };
162
+ }
163
+
164
+ // --- oRPC Procedures ---
165
+
166
+ const list = pub.input(z.object({ projectId: z.number() })).handler(async ({ context, input }) => {
167
+ const environment = await resolveEnvironment(
168
+ context.db,
169
+ input.projectId,
170
+ context.environmentName,
171
+ );
172
+ return context.db
173
+ .select()
174
+ .from(files)
175
+ .where(and(eq(files.projectId, input.projectId), eq(files.environmentId, environment.id)));
176
+ });
177
+
178
+ const get = pub.input(z.object({ id: z.number() })).handler(async ({ context, input }) => {
179
+ const result = await context.db.select().from(files).where(eq(files.id, input.id)).get();
180
+ if (!result) throw new ORPCError("NOT_FOUND");
181
+ return result;
182
+ });
183
+
184
// Public: how many blocks + repeatable items reference this file, counted by
// probing the serialized JSON content for the `"_fileId":N` marker with INSTR.
// NOTE(review): the marker is a raw substring probe, so an id that is a prefix
// of another (e.g. 1 vs 12) can over-count — confirm whether exact counts
// matter to callers or this is only used as a "safe to delete?" heuristic.
const getUsageCount = pub
  .input(z.object({ id: z.number() }))
  .handler(async ({ context, input }) => {
    const file = await context.db.select().from(files).where(eq(files.id, input.id)).get();
    if (!file) throw new ORPCError("NOT_FOUND");

    const marker = `"_fileId":${file.id}`;
    const blockCount = await context.db
      .select({ count: sql<number>`count(*)` })
      .from(blocks)
      .where(sql`INSTR(${blocks.content}, ${marker}) > 0`)
      .get();
    const itemCount = await context.db
      .select({ count: sql<number>`count(*)` })
      .from(repeatableItems)
      .where(sql`INSTR(${repeatableItems.content}, ${marker}) > 0`)
      .get();
    // Missing aggregate rows count as zero.
    return { count: (blockCount?.count ?? 0) + (itemCount?.count ?? 0) };
  });
203
+
204
+ const setAlt = authed
205
+ .input(z.object({ id: z.number(), alt: z.string() }))
206
+ .handler(async ({ context, input }) => {
207
+ const access = await assertFileAccess(context.db, input.id, context.user.id);
208
+ if (!access) throw new ORPCError("NOT_FOUND");
209
+
210
+ const result = await context.db
211
+ .update(files)
212
+ .set({ alt: input.alt, updatedAt: Date.now() })
213
+ .where(eq(files.id, input.id))
214
+ .returning()
215
+ .get();
216
+ broadcastInvalidation(context.env.ProjectRoom, access.file.projectId!, [
217
+ queryKeys.files.list,
218
+ queryKeys.files.get(input.id),
219
+ ]);
220
+ return result;
221
+ });
222
+
223
+ const setFilename = authed
224
+ .input(z.object({ id: z.number(), filename: z.string() }))
225
+ .handler(async ({ context, input }) => {
226
+ const access = await assertFileAccess(context.db, input.id, context.user.id);
227
+ if (!access) throw new ORPCError("NOT_FOUND");
228
+
229
+ const result = await context.db
230
+ .update(files)
231
+ .set({ filename: input.filename, updatedAt: Date.now() })
232
+ .where(eq(files.id, input.id))
233
+ .returning()
234
+ .get();
235
+ broadcastInvalidation(context.env.ProjectRoom, access.file.projectId!, [
236
+ queryKeys.files.list,
237
+ queryKeys.files.get(input.id),
238
+ ]);
239
+ return result;
240
+ });
241
+
242
// Authed: delete a file. Order matters — content references are scrubbed
// first, then the blob is removed from the bucket, then the DB row.
const deleteFn = authed.input(z.object({ id: z.number() })).handler(async ({ context, input }) => {
  const access = await assertFileAccess(context.db, input.id, context.user.id);
  if (!access) throw new ORPCError("NOT_FOUND");

  // Strip every { _fileId } reference from blocks/repeatable items, collecting
  // the touched ids so their caches can be invalidated below.
  const { blockIds, blockPageIds, itemIds } = await removeFileReferences(context.db, input.id);

  await context.env.FILES_BUCKET.delete(access.file.blobId);
  const result = await context.db.delete(files).where(eq(files.id, input.id)).returning().get();
  broadcastInvalidation(context.env.ProjectRoom, access.file.projectId!, [
    queryKeys.files.list,
    queryKeys.files.get(input.id),
    ...blockIds.map((id) => queryKeys.blocks.get(id)),
    ...blockPageIds.map((id) => queryKeys.blocks.getPageMarkdown(id)),
    ...itemIds.map((id) => queryKeys.repeatableItems.get(id)),
    // Usage counts / rendered pages only change when some content changed.
    ...(blockIds.length > 0 || itemIds.length > 0
      ? [queryKeys.blocks.getUsageCounts, queryKeys.pages.getByPathAll]
      : []),
  ]);
  return result;
});
262
+
263
// Authed: bulk delete. Authorization is a single join query (file → project →
// organization → membership); if any requested id is missing or inaccessible,
// the whole call is rejected.
// NOTE(review): duplicate ids in the input make the length check below fail
// even for fully accessible files — confirm callers de-duplicate.
const deleteMany = authed
  .input(z.object({ ids: z.array(z.number()) }))
  .handler(async ({ context, input }) => {
    const { ids } = input;
    if (ids.length === 0) return [];

    const authorizedFiles = await context.db
      .select({ id: files.id, blobId: files.blobId, projectId: files.projectId })
      .from(files)
      .innerJoin(projects, eq(projects.id, files.projectId))
      .innerJoin(organizationTable, eq(organizationTable.slug, projects.organizationSlug))
      .innerJoin(
        member,
        and(eq(member.organizationId, organizationTable.id), eq(member.userId, context.user.id)),
      )
      .where(inArray(files.id, ids));

    // Every requested id must have survived the authorization join.
    if (authorizedFiles.length !== ids.length) {
      throw new ORPCError("FORBIDDEN");
    }

    // Scrub content references per file, accumulating every touched id.
    const allBlockIds: number[] = [];
    const allBlockPageIds: number[] = [];
    const allItemIds: number[] = [];
    for (const id of ids) {
      const { blockIds, blockPageIds, itemIds } = await removeFileReferences(context.db, id);
      allBlockIds.push(...blockIds);
      allBlockPageIds.push(...blockPageIds);
      allItemIds.push(...itemIds);
    }

    await Promise.all(authorizedFiles.map((f) => context.env.FILES_BUCKET.delete(f.blobId)));
    await context.db.delete(files).where(inArray(files.id, ids));

    // NOTE(review): invalidation is broadcast to the first file's project only
    // — verify callers never mix files from different projects in one call.
    const projectId = authorizedFiles[0]!.projectId!;
    const uniqueBlockIds = [...new Set(allBlockIds)];
    const uniqueBlockPageIds = [...new Set(allBlockPageIds)];
    const uniqueItemIds = [...new Set(allItemIds)];
    broadcastInvalidation(context.env.ProjectRoom, projectId, [
      queryKeys.files.list,
      ...ids.map((id) => queryKeys.files.get(id)),
      ...uniqueBlockIds.map((id) => queryKeys.blocks.get(id)),
      ...uniqueBlockPageIds.map((id) => queryKeys.blocks.getPageMarkdown(id)),
      ...uniqueItemIds.map((id) => queryKeys.repeatableItems.get(id)),
      ...(uniqueBlockIds.length > 0 || uniqueItemIds.length > 0
        ? [queryKeys.blocks.getUsageCounts, queryKeys.pages.getByPathAll]
        : []),
    ]);
    return ids;
  });
313
+
314
+ const replace = authed
315
+ .input(z.object({ id: z.number(), newFileId: z.number() }))
316
+ .handler(async ({ context, input }) => {
317
+ const oldAccess = await assertFileAccess(context.db, input.id, context.user.id);
318
+ const newAccess = await assertFileAccess(context.db, input.newFileId, context.user.id);
319
+ if (!oldAccess || !newAccess) throw new ORPCError("NOT_FOUND");
320
+
321
+ // Update all blocks that reference the old file URL
322
+ await context.db.run(
323
+ sql`UPDATE ${blocks} SET ${blocks.content} = REPLACE(CAST(${blocks.content} AS TEXT), ${oldAccess.file.url}, ${newAccess.file.url}), ${blocks.updatedAt} = ${Date.now()} WHERE INSTR(${blocks.content}, ${oldAccess.file.url}) > 0`,
324
+ );
325
+
326
+ broadcastInvalidation(context.env.ProjectRoom, oldAccess.file.projectId!, [
327
+ queryKeys.files.list,
328
+ queryKeys.files.get(input.id),
329
+ ]);
330
+ return { replaced: true };
331
+ });
332
+
333
+ const setAiMetadata = authed
334
+ .input(z.object({ id: z.number(), enabled: z.boolean() }))
335
+ .handler(async ({ context, input }) => {
336
+ const access = await assertFileAccess(context.db, input.id, context.user.id);
337
+ if (!access) throw new ORPCError("NOT_FOUND");
338
+
339
+ const result = await context.db
340
+ .update(files)
341
+ .set({ aiMetadataEnabled: input.enabled, updatedAt: Date.now() })
342
+ .where(eq(files.id, input.id))
343
+ .returning()
344
+ .get();
345
+ if (input.enabled) {
346
+ scheduleAiJob(context.env.AI_JOB_SCHEDULER, {
347
+ entityTable: "files",
348
+ entityId: input.id,
349
+ type: "fileMetadata",
350
+ delayMs: 0,
351
+ });
352
+ }
353
+ broadcastInvalidation(context.env.ProjectRoom, access.file.projectId!, [
354
+ queryKeys.files.list,
355
+ queryKeys.files.get(input.id),
356
+ ]);
357
+ return result;
358
+ });
359
+
360
+ const generateMetadata = authed
361
+ .input(z.object({ id: z.number() }))
362
+ .handler(async ({ context, input }) => {
363
+ const access = await assertFileAccess(context.db, input.id, context.user.id);
364
+ if (!access) throw new ORPCError("NOT_FOUND");
365
+
366
+ await executeFileMetadata(context.db, context.env.OPEN_ROUTER_API_KEY, input.id);
367
+ broadcastInvalidation(context.env.ProjectRoom, access.file.projectId!, [
368
+ queryKeys.files.list,
369
+ queryKeys.files.get(input.id),
370
+ ]);
371
+ const updated = await context.db.select().from(files).where(eq(files.id, input.id)).get();
372
+ return updated;
373
+ });
374
+
375
/** oRPC surface for file management; mounted by the app router. */
export const fileProcedures = {
  list,
  get,
  getUsageCount,
  setAlt,
  setFilename,
  delete: deleteFn, // "delete" is a reserved word, hence the deleteFn alias
  deleteMany,
  replace,
  setAiMetadata,
  generateMetadata,
};
387
+
388
// --- Hono routes (binary serving + multipart upload) ---

export const fileHonoRoutes = new Hono<AppEnv>();

// Streams a stored object back to the client. The bucket key is everything
// after /files/serve/ — keys contain slashes (projectId/timestamp-name), hence
// the wildcard route instead of a single path param.
fileHonoRoutes.get("/serve/*", async (c) => {
  const key = c.req.path.replace(/^\/files\/serve\//, "");
  if (!key) return c.json({ error: "Missing file key" }, 400);

  const object = await c.env.FILES_BUCKET.get(key);
  if (!object) return c.notFound();

  return new Response(object.body, {
    headers: {
      "Content-Type": object.httpMetadata?.contentType ?? "application/octet-stream",
      // Keys embed the upload timestamp (see /upload), so content at a given
      // key never changes — immutable caching is safe.
      "Cache-Control": "public, max-age=31536000, immutable",
      "Content-Disposition": "inline",
    },
  });
});
407
+
408
+ fileHonoRoutes.post("/upload", async (c) => {
409
+ if (!c.var.user) return c.json({ error: "Unauthorized" }, 401);
410
+
411
+ const body = await c.req.parseBody();
412
+ const file = body["file"];
413
+ const projectId = Number(body["projectId"]);
414
+
415
+ if (!(file instanceof File)) return c.json({ error: "Missing file" }, 400);
416
+ if (!projectId || Number.isNaN(projectId)) return c.json({ error: "Missing projectId" }, 400);
417
+
418
+ const project = await getAuthorizedProject(c.var.db, projectId, c.var.user.id);
419
+ if (!project) return c.json({ error: "Not found" }, 404);
420
+
421
+ const environment = await resolveEnvironment(c.var.db, projectId, c.var.environmentName);
422
+
423
+ const now = Date.now();
424
+ const key = `${projectId}/${now}-${file.name}`;
425
+
426
+ await c.env.FILES_BUCKET.put(key, file.stream(), {
427
+ httpMetadata: { contentType: file.type },
428
+ });
429
+
430
+ const apiOrigin = new URL(c.req.url).origin;
431
+ const url = `${apiOrigin}/files/serve/${key}`;
432
+
433
+ const result = await c.var.db
434
+ .insert(files)
435
+ .values({
436
+ projectId,
437
+ environmentId: environment.id,
438
+ blobId: key,
439
+ filename: file.name,
440
+ mimeType: file.type,
441
+ size: file.size,
442
+ path: key,
443
+ url,
444
+ alt: "",
445
+ createdAt: now,
446
+ updatedAt: now,
447
+ })
448
+ .returning()
449
+ .get();
450
+
451
+ scheduleAiJob(c.env.AI_JOB_SCHEDULER, {
452
+ entityTable: "files",
453
+ entityId: result.id,
454
+ type: "fileMetadata",
455
+ delayMs: 0,
456
+ });
457
+ broadcastInvalidation(c.env.ProjectRoom, projectId, [
458
+ queryKeys.files.list,
459
+ queryKeys.files.get(result.id),
460
+ ]);
461
+
462
+ return c.json(result, 201);
463
+ });
@@ -0,0 +1,164 @@
1
+ import { ORPCError } from "@orpc/server";
2
+ import { and, eq } from "drizzle-orm";
3
+ import { generateKeyBetween } from "fractional-indexing";
4
+ import { z } from "zod";
5
+
6
+ import { broadcastInvalidation } from "../lib/broadcast-invalidation";
7
+ import { queryKeys } from "../lib/query-keys";
8
+ import { resolveEnvironment } from "../lib/resolve-environment";
9
+ import { pub, synced } from "../orpc";
10
+ import { blocks, layouts, projects, repeatableItems } from "../schema";
11
+
12
+ // --- Procedures ---
13
+
14
+ const repeatableItemSeedSchema = z.object({
15
+ tempId: z.string(),
16
+ parentTempId: z.string().nullable(),
17
+ fieldName: z.string(),
18
+ content: z.unknown(),
19
+ position: z.string(),
20
+ });
21
+
22
+ const syncLayoutsSchema = z.object({
23
+ projectSlug: z.string(),
24
+ layouts: z.array(
25
+ z.object({
26
+ layoutId: z.string(),
27
+ description: z.string(),
28
+ blocks: z.array(
29
+ z.object({
30
+ type: z.string(),
31
+ content: z.unknown(),
32
+ settings: z.unknown().optional(),
33
+ placement: z.enum(["before", "after"]).optional(),
34
+ repeatableItems: z.array(repeatableItemSeedSchema).optional(),
35
+ }),
36
+ ),
37
+ }),
38
+ ),
39
+ });
40
+
41
+ const list = pub.input(z.object({ projectId: z.number() })).handler(async ({ context, input }) => {
42
+ const { projectId } = input;
43
+ const environment = await resolveEnvironment(context.db, projectId, context.environmentName);
44
+ return context.db
45
+ .select()
46
+ .from(layouts)
47
+ .where(and(eq(layouts.projectId, projectId), eq(layouts.environmentId, environment.id)));
48
+ });
49
+
50
// Synced: upsert the project's layouts from code. Existing layouts (matched by
// projectId + environmentId + layoutId) only get their description refreshed;
// seed blocks and repeatable items are created for NEW layouts only, so
// content edited in the CMS is never clobbered by a re-sync.
const sync = synced.input(syncLayoutsSchema).handler(async ({ context, input }) => {
  const { projectSlug, layouts: layoutDefs } = input;
  const project = await context.db
    .select()
    .from(projects)
    .where(eq(projects.slug, projectSlug))
    .get();

  if (!project) {
    throw new ORPCError("NOT_FOUND");
  }

  const projectId = project.id;
  // Sync may run against a brand-new environment, hence autoCreate.
  const environment = await resolveEnvironment(context.db, projectId, context.environmentName, {
    autoCreate: true,
  });
  const now = Date.now();
  const results = [];

  for (const def of layoutDefs) {
    const existing = await context.db
      .select()
      .from(layouts)
      .where(
        and(
          eq(layouts.projectId, projectId),
          eq(layouts.environmentId, environment.id),
          eq(layouts.layoutId, def.layoutId),
        ),
      )
      .get();

    if (existing) {
      // Known layout: refresh metadata only, leave its blocks alone.
      const updated = await context.db
        .update(layouts)
        .set({ description: def.description, updatedAt: now })
        .where(eq(layouts.id, existing.id))
        .returning()
        .get();
      results.push(updated);
      continue;
    }

    const created = await context.db
      .insert(layouts)
      .values({
        projectId,
        environmentId: environment.id,
        layoutId: def.layoutId,
        description: def.description,
        createdAt: now,
        updatedAt: now,
      })
      .returning()
      .get();
    results.push(created);

    // Create blocks for newly created layouts, ordered by successive
    // fractional-index keys (each generated after the previous one).
    let prevPosition: string | null = null;
    for (const blockDef of def.blocks) {
      const position = generateKeyBetween(prevPosition, null);
      prevPosition = position;

      const block = await context.db
        .insert(blocks)
        .values({
          layoutId: created.id,
          type: blockDef.type,
          content: blockDef.content,
          settings: blockDef.settings ?? null,
          placement: blockDef.placement ?? null,
          position,
          summary: "",
          createdAt: now,
          updatedAt: now,
        })
        .returning()
        .get();

      const itemSeeds = blockDef.repeatableItems;
      if (itemSeeds && itemSeeds.length > 0) {
        // Map client-side tempIds to real row ids as we insert; a parent seed
        // must appear before its children or parentItemId resolves to null.
        const tempIdToRealId = new Map<string, number>();
        for (const seed of itemSeeds) {
          const parentItemId = seed.parentTempId
            ? (tempIdToRealId.get(seed.parentTempId) ?? null)
            : null;
          const inserted = await context.db
            .insert(repeatableItems)
            .values({
              blockId: block.id,
              parentItemId,
              fieldName: seed.fieldName,
              content: seed.content,
              summary: "",
              position: seed.position,
              createdAt: now,
              updatedAt: now,
            })
            .returning()
            .get();
          tempIdToRealId.set(seed.tempId, inserted.id);
        }
      }
    }
  }

  broadcastInvalidation(context.env.ProjectRoom, projectId, [
    queryKeys.layouts.all,
    queryKeys.pages.getByPathAll,
  ]);

  return results;
});
163
+
164
/** oRPC surface for layouts; mounted by the app router. */
export const layoutProcedures = { list, sync };