dineway 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +9 -0
- package/README.md +89 -0
- package/dist/adapters-BlzWJG82.d.mts +106 -0
- package/dist/apply-CAPvMfoU.mjs +1339 -0
- package/dist/astro/index.d.mts +50 -0
- package/dist/astro/index.mjs +1326 -0
- package/dist/astro/middleware/auth.d.mts +30 -0
- package/dist/astro/middleware/auth.mjs +708 -0
- package/dist/astro/middleware/redirect.d.mts +21 -0
- package/dist/astro/middleware/redirect.mjs +62 -0
- package/dist/astro/middleware/request-context.d.mts +17 -0
- package/dist/astro/middleware/request-context.mjs +1371 -0
- package/dist/astro/middleware/setup.d.mts +19 -0
- package/dist/astro/middleware/setup.mjs +46 -0
- package/dist/astro/middleware.d.mts +12 -0
- package/dist/astro/middleware.mjs +1716 -0
- package/dist/astro/types.d.mts +269 -0
- package/dist/astro/types.mjs +1 -0
- package/dist/base64-F8-DUraK.mjs +58 -0
- package/dist/byline-DeWCMU_i.mjs +234 -0
- package/dist/bylines-DyqBV9EQ.mjs +137 -0
- package/dist/chunk-ClPoSABd.mjs +21 -0
- package/dist/cli/index.d.mts +1 -0
- package/dist/cli/index.mjs +3987 -0
- package/dist/client/external-auth-headers.d.mts +38 -0
- package/dist/client/external-auth-headers.mjs +101 -0
- package/dist/client/index.d.mts +397 -0
- package/dist/client/index.mjs +345 -0
- package/dist/config-Cq8H0SfX.mjs +46 -0
- package/dist/connection-C9pxzuag.mjs +52 -0
- package/dist/content-zSgdNmnt.mjs +836 -0
- package/dist/db/index.d.mts +4 -0
- package/dist/db/index.mjs +62 -0
- package/dist/db/libsql.d.mts +10 -0
- package/dist/db/libsql.mjs +21 -0
- package/dist/db/postgres.d.mts +10 -0
- package/dist/db/postgres.mjs +29 -0
- package/dist/db/sqlite.d.mts +10 -0
- package/dist/db/sqlite.mjs +15 -0
- package/dist/default-WYlzADZL.mjs +80 -0
- package/dist/dialect-helpers-B9uSp2GJ.mjs +89 -0
- package/dist/error-DrxtnGPg.mjs +26 -0
- package/dist/index-C-jx21qs.d.mts +4771 -0
- package/dist/index.d.mts +16 -0
- package/dist/index.mjs +30 -0
- package/dist/load-C6FCD1FU.mjs +27 -0
- package/dist/loader-qKmo0wAY.mjs +446 -0
- package/dist/manifest-schema-CTSEyIJ3.mjs +186 -0
- package/dist/media/index.d.mts +25 -0
- package/dist/media/index.mjs +54 -0
- package/dist/media/local-runtime.d.mts +38 -0
- package/dist/media/local-runtime.mjs +132 -0
- package/dist/media-DMTr80Gv.mjs +199 -0
- package/dist/mode-BlyYtIFO.mjs +22 -0
- package/dist/page/index.d.mts +148 -0
- package/dist/page/index.mjs +419 -0
- package/dist/placeholder-B3knXwNc.mjs +267 -0
- package/dist/placeholder-bOx1xCTY.d.mts +283 -0
- package/dist/plugin-utils.d.mts +57 -0
- package/dist/plugin-utils.mjs +77 -0
- package/dist/plugins/adapt-sandbox-entry.d.mts +21 -0
- package/dist/plugins/adapt-sandbox-entry.mjs +112 -0
- package/dist/query-BiaPl_g2.mjs +459 -0
- package/dist/redirect-JPqLAbxa.mjs +328 -0
- package/dist/registry-DSd1GWB8.mjs +851 -0
- package/dist/request-context.d.mts +49 -0
- package/dist/request-context.mjs +42 -0
- package/dist/runner-B5l1JfOj.d.mts +26 -0
- package/dist/runner-BGUGywgG.mjs +1529 -0
- package/dist/runtime.d.mts +25 -0
- package/dist/runtime.mjs +41 -0
- package/dist/search-BNruJHDL.mjs +11054 -0
- package/dist/seed/index.d.mts +3 -0
- package/dist/seed/index.mjs +15 -0
- package/dist/seo/index.d.mts +69 -0
- package/dist/seo/index.mjs +69 -0
- package/dist/storage/local.d.mts +38 -0
- package/dist/storage/local.mjs +165 -0
- package/dist/storage/s3.d.mts +31 -0
- package/dist/storage/s3.mjs +174 -0
- package/dist/tokens-4vgYuXsZ.mjs +170 -0
- package/dist/transport-C5FYnid7.mjs +417 -0
- package/dist/transport-gIL-e43D.d.mts +41 -0
- package/dist/types-BawVha09.mjs +30 -0
- package/dist/types-BgQeVaPj.d.mts +192 -0
- package/dist/types-CLLdsG3g.d.mts +103 -0
- package/dist/types-D38djUXv.d.mts +1196 -0
- package/dist/types-DShnjzb6.mjs +15 -0
- package/dist/types-DkvMXalq.d.mts +425 -0
- package/dist/types-DuNbGKjF.mjs +74 -0
- package/dist/types-ju-_ORz7.d.mts +182 -0
- package/dist/validate-CXnRKfJK.mjs +327 -0
- package/dist/validate-CqRJb_xU.mjs +96 -0
- package/dist/validate-DVKJJ-M_.d.mts +377 -0
- package/locals.d.ts +47 -0
- package/package.json +313 -0
|
@@ -0,0 +1,836 @@
|
|
|
1
|
+
import { t as __exportAll } from "./chunk-ClPoSABd.mjs";
|
|
2
|
+
import { n as decodeCursor, r as encodeCursor, t as DinewayValidationError } from "./types-BawVha09.mjs";
|
|
3
|
+
import { sql } from "kysely";
|
|
4
|
+
import { monotonicFactory, ulid } from "ulidx";
|
|
5
|
+
|
|
6
|
+
//#region src/utils/slugify.ts
// Patterns used by slugify(); hoisted so each is compiled exactly once.
const DIACRITICS_PATTERN = /[\u0300-\u036f]/g;
const WHITESPACE_UNDERSCORE_PATTERN = /[\s_]+/g;
const NON_ALPHANUMERIC_HYPHEN_PATTERN = /[^a-z0-9-]/g;
const MULTIPLE_HYPHENS_PATTERN = /-+/g;
const LEADING_TRAILING_HYPHEN_PATTERN = /^-|-$/g;
const TRAILING_HYPHEN_PATTERN = /-$/;
/**
 * Convert a string to a URL-friendly slug.
 *
 * Handles unicode by normalizing to NFD and stripping diacritics,
 * so "café" becomes "cafe", "naïve" becomes "naive", etc.
 *
 * @param {string} text - Arbitrary input text.
 * @param {number} [maxLength=80] - Maximum slug length; truncation never leaves a trailing hyphen.
 * @returns {string} Lowercase slug containing only a-z, 0-9 and single hyphens.
 */
function slugify(text, maxLength = 80) {
	// Lowercase, then strip diacritics via NFD decomposition.
	const ascii = text.toLowerCase().normalize("NFD").replace(DIACRITICS_PATTERN, "");
	// Collapse whitespace/underscores into hyphens, drop everything else,
	// and squeeze/trim hyphen runs.
	const hyphenated = ascii
		.replace(WHITESPACE_UNDERSCORE_PATTERN, "-")
		.replace(NON_ALPHANUMERIC_HYPHEN_PATTERN, "")
		.replace(MULTIPLE_HYPHENS_PATTERN, "-")
		.replace(LEADING_TRAILING_HYPHEN_PATTERN, "");
	// Enforce the length cap, then strip any hyphen left dangling by the cut.
	return hyphenated.slice(0, maxLength).replace(TRAILING_HYPHEN_PATTERN, "");
}
|
|
22
|
+
|
|
23
|
+
//#endregion
|
|
24
|
+
//#region src/database/repositories/revision.ts
// Shared monotonic ULID factory: IDs are strictly increasing even within one millisecond.
const monotonic = monotonicFactory();
/**
 * Revision repository for version history
 *
 * Each revision stores a JSON snapshot of the content at a point in time.
 * Used when collection has `supports: ["revisions"]` enabled.
 */
var RevisionRepository = class {
	/** @param db - Kysely database instance used by every query here. */
	constructor(db) {
		this.db = db;
	}
	/**
	 * Create a new revision and return the freshly persisted record.
	 */
	async create(input) {
		const revisionId = monotonic();
		await this.db
			.insertInto("revisions")
			.values({
				id: revisionId,
				collection: input.collection,
				entry_id: input.entryId,
				data: JSON.stringify(input.data),
				author_id: input.authorId ?? null
			})
			.execute();
		const created = await this.findById(revisionId);
		if (!created) throw new Error("Failed to create revision");
		return created;
	}
	/**
	 * Find revision by ID; null when absent.
	 */
	async findById(id) {
		const found = await this.db
			.selectFrom("revisions")
			.selectAll()
			.where("id", "=", id)
			.executeTakeFirst();
		if (!found) return null;
		return this.rowToRevision(found);
	}
	/**
	 * Get all revisions for an entry (newest first)
	 *
	 * Orders by monotonic ULID (descending). The monotonic factory
	 * guarantees strictly increasing IDs even within the same millisecond.
	 */
	async findByEntry(collection, entryId, options = {}) {
		let builder = this.db
			.selectFrom("revisions")
			.selectAll()
			.where("collection", "=", collection)
			.where("entry_id", "=", entryId)
			.orderBy("id", "desc");
		if (options.limit) builder = builder.limit(options.limit);
		const rows = await builder.execute();
		return rows.map((r) => this.rowToRevision(r));
	}
	/**
	 * Get the most recent revision for an entry, or null when none exist.
	 */
	async findLatest(collection, entryId) {
		const newest = await this.db
			.selectFrom("revisions")
			.selectAll()
			.where("collection", "=", collection)
			.where("entry_id", "=", entryId)
			.orderBy("id", "desc")
			.limit(1)
			.executeTakeFirst();
		return newest ? this.rowToRevision(newest) : null;
	}
	/**
	 * Count revisions for an entry.
	 */
	async countByEntry(collection, entryId) {
		const counted = await this.db
			.selectFrom("revisions")
			.select((eb) => eb.fn.count("id").as("count"))
			.where("collection", "=", collection)
			.where("entry_id", "=", entryId)
			.executeTakeFirst();
		return Number(counted?.count || 0);
	}
	/**
	 * Delete all revisions for an entry (use when entry is deleted).
	 * @returns the number of rows removed
	 */
	async deleteByEntry(collection, entryId) {
		const outcome = await this.db
			.deleteFrom("revisions")
			.where("collection", "=", collection)
			.where("entry_id", "=", entryId)
			.executeTakeFirst();
		return Number(outcome.numDeletedRows ?? 0);
	}
	/**
	 * Delete old revisions, keeping the most recent N.
	 * NOTE(review): keepCount of 0 yields an empty keep list and deletes
	 * nothing (the guard below returns early) — confirm that is intended.
	 * @returns the number of rows removed
	 */
	async pruneOldRevisions(collection, entryId, keepCount) {
		const survivors = await this.db
			.selectFrom("revisions")
			.select("id")
			.where("collection", "=", collection)
			.where("entry_id", "=", entryId)
			.orderBy("created_at", "desc")
			.orderBy("id", "desc")
			.limit(keepCount)
			.execute();
		const keepIds = survivors.map((r) => r.id);
		if (keepIds.length === 0) return 0;
		const outcome = await this.db
			.deleteFrom("revisions")
			.where("collection", "=", collection)
			.where("entry_id", "=", entryId)
			.where("id", "not in", keepIds)
			.executeTakeFirst();
		return Number(outcome.numDeletedRows ?? 0);
	}
	/**
	 * Update revision data in place
	 * Used for autosave to avoid creating many small revisions.
	 */
	async updateData(id, data) {
		await this.db
			.updateTable("revisions")
			.set({ data: JSON.stringify(data) })
			.where("id", "=", id)
			.execute();
	}
	/**
	 * Convert database row to Revision object (snake_case columns to
	 * camelCase fields; JSON payload parsed back into an object).
	 */
	rowToRevision(row) {
		const { id, collection } = row;
		return {
			id,
			collection,
			entryId: row.entry_id,
			data: JSON.parse(row.data),
			authorId: row.author_id,
			createdAt: row.created_at
		};
	}
};
|
|
122
|
+
|
|
123
|
+
//#endregion
|
|
124
|
+
//#region src/database/repositories/content.ts
// Lazy namespace export: the getter defers resolution until ContentRepository
// has been assigned further down in this module.
var content_exports = /* @__PURE__ */ __exportAll({ ContentRepository: () => ContentRepository });
// ULID shape: exactly 26 uppercase Crockford-base32-ish characters (0-9, A-Z).
const ULID_PATTERN = /^[0-9A-Z]{26}$/;
/**
 * System columns that exist in every ec_* table
 * (keys in user-supplied `data` matching these are skipped by create()/update()).
 */
const SYSTEM_COLUMNS = new Set([
	"id",
	"slug",
	"status",
	"author_id",
	"primary_byline_id",
	"created_at",
	"updated_at",
	"published_at",
	"scheduled_at",
	"deleted_at",
	"version",
	"live_revision_id",
	"draft_revision_id",
	"locale",
	"translation_group"
]);
/**
 * Get the table name for a collection type
 * e.g. "posts" -> "ec_posts".
 */
function getTableName(type) {
	return `ec_${type}`;
}
|
|
153
|
+
/**
 * Serialize a value for database storage.
 * Objects/arrays are JSON-stringified; booleans become 0/1 for SQLite;
 * null and undefined both collapse to SQL NULL. Everything else passes through.
 */
function serializeValue(value) {
	if (value == null) return null;
	switch (typeof value) {
		case "boolean":
			return value ? 1 : 0;
		case "object":
			return JSON.stringify(value);
		default:
			return value;
	}
}
|
|
164
|
+
/**
 * Deserialize a value from database storage.
 * Strings that look like JSON objects/arrays are parsed; anything that fails
 * to parse (or any non-string value) is returned unchanged.
 */
function deserializeValue(value) {
	const looksLikeJson =
		typeof value === "string" && (value.startsWith("{") || value.startsWith("["));
	if (!looksLikeJson) return value;
	try {
		return JSON.parse(value);
	} catch {
		// Not actually JSON (e.g. a plain string that starts with "{") — keep raw.
		return value;
	}
}
|
|
178
|
+
/** Pattern for escaping special regex characters */
const REGEX_ESCAPE_PATTERN = /[.*+?^${}()|[\]\\]/g;
/**
 * Escape special regex characters in a string for use in `new RegExp()`.
 * @param {string} s - Raw text to embed literally into a pattern.
 * @returns {string} Text with every regex metacharacter backslash-escaped.
 */
function escapeRegExp(s) {
	return s.replace(REGEX_ESCAPE_PATTERN, (ch) => `\\${ch}`);
}
|
|
186
|
+
/**
|
|
187
|
+
* Repository for content CRUD operations
|
|
188
|
+
*
|
|
189
|
+
* Content is stored in per-collection tables (ec_posts, ec_pages, etc.)
|
|
190
|
+
* Each field becomes a real column in the table.
|
|
191
|
+
*/
|
|
192
|
+
var ContentRepository = class {
|
|
193
|
+
constructor(db) {
	// Kysely database instance shared by every query in this repository.
	this.db = db;
}
|
|
196
|
+
/**
 * Create a new content item
 *
 * Inserts one row into the collection's ec_* table and re-reads it.
 * Non-system keys in `input.data` are written as real columns.
 * @throws DinewayValidationError when `type` is missing or the
 *         translation source cannot be found.
 * @throws Error when the freshly inserted row cannot be read back.
 */
async create(input) {
	const id = ulid();
	const now = (/* @__PURE__ */ new Date()).toISOString();
	const { type, slug, data, status = "draft", authorId, primaryBylineId, locale, translationOf, publishedAt, createdAt } = input;
	if (!type) throw new DinewayValidationError("Content type is required");
	const tableName = getTableName(type);
	// A new item starts its own translation group unless it translates an
	// existing entry, in which case it joins that entry's group.
	let translationGroup = id;
	if (translationOf) {
		const source = await this.findById(type, translationOf);
		if (!source) throw new DinewayValidationError("Translation source content not found");
		translationGroup = source.translationGroup || source.id;
	}
	// System columns first; caller-supplied field columns are appended below.
	const columns = [
		"id",
		"slug",
		"status",
		"author_id",
		"primary_byline_id",
		"created_at",
		"updated_at",
		"published_at",
		"version",
		"locale",
		"translation_group"
	];
	const values = [
		id,
		slug || null,
		status,
		authorId || null,
		primaryBylineId ?? null,
		createdAt || now,
		now,
		publishedAt || null,
		1,
		locale || "en",
		translationGroup
	];
	if (data && typeof data === "object") {
		// System columns cannot be overridden through `data`.
		for (const [key, value] of Object.entries(data)) if (!SYSTEM_COLUMNS.has(key)) {
			columns.push(key);
			values.push(serializeValue(value));
		}
	}
	// sql.ref() quotes identifiers; values are bound as parameters
	// (NULL is spelled literally since a bound null is dialect-sensitive).
	const columnRefs = columns.map((c) => sql.ref(c));
	const valuePlaceholders = values.map((v) => v === null ? sql`NULL` : sql`${v}`);
	await sql`
		INSERT INTO ${sql.ref(tableName)} (${sql.join(columnRefs, sql`, `)})
		VALUES (${sql.join(valuePlaceholders, sql`, `)})
	`.execute(this.db);
	const item = await this.findById(type, id);
	if (!item) throw new Error("Failed to create content");
	return item;
}
|
|
253
|
+
/**
|
|
254
|
+
* Generate a unique slug for a content item within a collection.
|
|
255
|
+
*
|
|
256
|
+
* Checks the collection table for existing slugs that match `baseSlug`
|
|
257
|
+
* (optionally scoped to a locale) and appends a numeric suffix (`-1`,
|
|
258
|
+
* `-2`, etc.) on collision to guarantee uniqueness.
|
|
259
|
+
*
|
|
260
|
+
* Returns `null` if `baseSlug` is empty after slugification.
|
|
261
|
+
*/
|
|
262
|
+
async generateUniqueSlug(type, text, locale) {
|
|
263
|
+
const baseSlug = slugify(text);
|
|
264
|
+
if (!baseSlug) return null;
|
|
265
|
+
const tableName = getTableName(type);
|
|
266
|
+
if ((locale ? await sql`
|
|
267
|
+
SELECT slug FROM ${sql.ref(tableName)}
|
|
268
|
+
WHERE slug = ${baseSlug}
|
|
269
|
+
AND locale = ${locale}
|
|
270
|
+
LIMIT 1
|
|
271
|
+
`.execute(this.db) : await sql`
|
|
272
|
+
SELECT slug FROM ${sql.ref(tableName)}
|
|
273
|
+
WHERE slug = ${baseSlug}
|
|
274
|
+
LIMIT 1
|
|
275
|
+
`.execute(this.db)).rows.length === 0) return baseSlug;
|
|
276
|
+
const pattern = `${baseSlug}-%`;
|
|
277
|
+
const candidates = locale ? await sql`
|
|
278
|
+
SELECT slug FROM ${sql.ref(tableName)}
|
|
279
|
+
WHERE (slug = ${baseSlug} OR slug LIKE ${pattern})
|
|
280
|
+
AND locale = ${locale}
|
|
281
|
+
`.execute(this.db) : await sql`
|
|
282
|
+
SELECT slug FROM ${sql.ref(tableName)}
|
|
283
|
+
WHERE slug = ${baseSlug} OR slug LIKE ${pattern}
|
|
284
|
+
`.execute(this.db);
|
|
285
|
+
let maxSuffix = 0;
|
|
286
|
+
const suffixPattern = new RegExp(`^${escapeRegExp(baseSlug)}-(\\d+)$`);
|
|
287
|
+
for (const row of candidates.rows) {
|
|
288
|
+
const match = suffixPattern.exec(row.slug);
|
|
289
|
+
if (match) {
|
|
290
|
+
const n = parseInt(match[1], 10);
|
|
291
|
+
if (n > maxSuffix) maxSuffix = n;
|
|
292
|
+
}
|
|
293
|
+
}
|
|
294
|
+
return `${baseSlug}-${maxSuffix + 1}`;
|
|
295
|
+
}
|
|
296
|
+
/**
|
|
297
|
+
* Duplicate a content item
|
|
298
|
+
* Creates a new draft copy with "(Copy)" appended to the title.
|
|
299
|
+
* A slug is auto-generated from the new title by the handler layer.
|
|
300
|
+
*/
|
|
301
|
+
async duplicate(type, id, authorId) {
|
|
302
|
+
const original = await this.findById(type, id);
|
|
303
|
+
if (!original) throw new DinewayValidationError("Content item not found");
|
|
304
|
+
const newData = { ...original.data };
|
|
305
|
+
if (typeof newData.title === "string") newData.title = `${newData.title} (Copy)`;
|
|
306
|
+
else if (typeof newData.name === "string") newData.name = `${newData.name} (Copy)`;
|
|
307
|
+
const slugSource = typeof newData.title === "string" ? newData.title : typeof newData.name === "string" ? newData.name : null;
|
|
308
|
+
const slug = slugSource ? await this.generateUniqueSlug(type, slugSource, original.locale ?? void 0) : null;
|
|
309
|
+
return this.create({
|
|
310
|
+
type,
|
|
311
|
+
slug,
|
|
312
|
+
data: newData,
|
|
313
|
+
status: "draft",
|
|
314
|
+
authorId: authorId || original.authorId || void 0
|
|
315
|
+
});
|
|
316
|
+
}
|
|
317
|
+
/**
|
|
318
|
+
* Find content by ID
|
|
319
|
+
*/
|
|
320
|
+
async findById(type, id) {
|
|
321
|
+
const tableName = getTableName(type);
|
|
322
|
+
const row = (await sql`
|
|
323
|
+
SELECT * FROM ${sql.ref(tableName)}
|
|
324
|
+
WHERE id = ${id}
|
|
325
|
+
AND deleted_at IS NULL
|
|
326
|
+
`.execute(this.db)).rows[0];
|
|
327
|
+
if (!row) return null;
|
|
328
|
+
return this.mapRow(type, row);
|
|
329
|
+
}
|
|
330
|
+
/**
|
|
331
|
+
* Find content by id, including trashed (soft-deleted) items.
|
|
332
|
+
* Used by restore endpoint for ownership checks.
|
|
333
|
+
*/
|
|
334
|
+
async findByIdIncludingTrashed(type, id) {
|
|
335
|
+
const tableName = getTableName(type);
|
|
336
|
+
const row = (await sql`
|
|
337
|
+
SELECT * FROM ${sql.ref(tableName)}
|
|
338
|
+
WHERE id = ${id}
|
|
339
|
+
`.execute(this.db)).rows[0];
|
|
340
|
+
if (!row) return null;
|
|
341
|
+
return this.mapRow(type, row);
|
|
342
|
+
}
|
|
343
|
+
/**
 * Find content by ID or slug. Tries ID first if it looks like a ULID,
 * otherwise tries slug. Falls back to the other if the first lookup misses.
 */
async findByIdOrSlug(type, identifier, locale) {
	// Delegate to the shared lookup with includeTrashed = false.
	return this._findByIdOrSlug(type, identifier, false, locale);
}
|
|
350
|
+
/**
 * Find content by ID or slug, including trashed (soft-deleted) items.
 * Used by restore/permanent-delete endpoints.
 */
async findByIdOrSlugIncludingTrashed(type, identifier, locale) {
	// Delegate to the shared lookup with includeTrashed = true.
	return this._findByIdOrSlug(type, identifier, true, locale);
}
|
|
357
|
+
async _findByIdOrSlug(type, identifier, includeTrashed, locale) {
|
|
358
|
+
const looksLikeUlid = ULID_PATTERN.test(identifier);
|
|
359
|
+
const findById = includeTrashed ? (t, id) => this.findByIdIncludingTrashed(t, id) : (t, id) => this.findById(t, id);
|
|
360
|
+
const findBySlug = includeTrashed ? (t, s) => this.findBySlugIncludingTrashed(t, s, locale) : (t, s) => this.findBySlug(t, s, locale);
|
|
361
|
+
if (looksLikeUlid) {
|
|
362
|
+
const byId = await findById(type, identifier);
|
|
363
|
+
if (byId) return byId;
|
|
364
|
+
return findBySlug(type, identifier);
|
|
365
|
+
}
|
|
366
|
+
const bySlug = await findBySlug(type, identifier);
|
|
367
|
+
if (bySlug) return bySlug;
|
|
368
|
+
return findById(type, identifier);
|
|
369
|
+
}
|
|
370
|
+
/**
|
|
371
|
+
* Find content by slug
|
|
372
|
+
*/
|
|
373
|
+
async findBySlug(type, slug, locale) {
|
|
374
|
+
const tableName = getTableName(type);
|
|
375
|
+
const row = (locale ? await sql`
|
|
376
|
+
SELECT * FROM ${sql.ref(tableName)}
|
|
377
|
+
WHERE slug = ${slug}
|
|
378
|
+
AND locale = ${locale}
|
|
379
|
+
AND deleted_at IS NULL
|
|
380
|
+
`.execute(this.db) : await sql`
|
|
381
|
+
SELECT * FROM ${sql.ref(tableName)}
|
|
382
|
+
WHERE slug = ${slug}
|
|
383
|
+
AND deleted_at IS NULL
|
|
384
|
+
ORDER BY locale ASC
|
|
385
|
+
LIMIT 1
|
|
386
|
+
`.execute(this.db)).rows[0];
|
|
387
|
+
if (!row) return null;
|
|
388
|
+
return this.mapRow(type, row);
|
|
389
|
+
}
|
|
390
|
+
/**
|
|
391
|
+
* Find content by slug, including trashed (soft-deleted) items.
|
|
392
|
+
* Used by restore/permanent-delete endpoints.
|
|
393
|
+
*/
|
|
394
|
+
async findBySlugIncludingTrashed(type, slug, locale) {
|
|
395
|
+
const tableName = getTableName(type);
|
|
396
|
+
const row = (locale ? await sql`
|
|
397
|
+
SELECT * FROM ${sql.ref(tableName)}
|
|
398
|
+
WHERE slug = ${slug}
|
|
399
|
+
AND locale = ${locale}
|
|
400
|
+
`.execute(this.db) : await sql`
|
|
401
|
+
SELECT * FROM ${sql.ref(tableName)}
|
|
402
|
+
WHERE slug = ${slug}
|
|
403
|
+
ORDER BY locale ASC
|
|
404
|
+
LIMIT 1
|
|
405
|
+
`.execute(this.db)).rows[0];
|
|
406
|
+
if (!row) return null;
|
|
407
|
+
return this.mapRow(type, row);
|
|
408
|
+
}
|
|
409
|
+
/**
 * Find many content items with filtering and pagination
 *
 * Uses keyset (cursor) pagination: fetches limit+1 rows to detect whether
 * a next page exists, and encodes (orderValue, id) of the last returned
 * row as the next cursor.
 */
async findMany(type, options = {}) {
	const tableName = getTableName(type);
	// Page size is capped at 100 rows; defaults to 50.
	const limit = Math.min(options.limit || 50, 100);
	const orderField = options.orderBy?.field || "createdAt";
	const orderDirection = options.orderBy?.direction || "desc";
	const dbField = this.mapOrderField(orderField);
	// Whitelist the direction so only "ASC"/"DESC" ever reach the query.
	const safeOrderDirection = orderDirection.toLowerCase() === "asc" ? "ASC" : "DESC";
	let query = this.db.selectFrom(tableName).selectAll().where("deleted_at", "is", null);
	if (options.where?.status) query = query.where("status", "=", options.where.status);
	if (options.where?.authorId) query = query.where("author_id", "=", options.where.authorId);
	if (options.where?.locale) query = query.where("locale", "=", options.where.locale);
	if (options.cursor) {
		const decoded = decodeCursor(options.cursor);
		// An undecodable cursor is silently ignored (first page returned).
		if (decoded) {
			const { orderValue, id: cursorId } = decoded;
			// Keyset predicate: rows strictly past the cursor in (orderValue, id) order.
			if (safeOrderDirection === "DESC") query = query.where((eb) => eb.or([eb(dbField, "<", orderValue), eb.and([eb(dbField, "=", orderValue), eb("id", "<", cursorId)])]));
			else query = query.where((eb) => eb.or([eb(dbField, ">", orderValue), eb.and([eb(dbField, "=", orderValue), eb("id", ">", cursorId)])]));
		}
	}
	// "id" is the tiebreaker so ordering is total and cursors are stable.
	query = query.orderBy(dbField, safeOrderDirection === "ASC" ? "asc" : "desc").orderBy("id", safeOrderDirection === "ASC" ? "asc" : "desc").limit(limit + 1);
	const rows = await query.execute();
	const hasMore = rows.length > limit;
	const items = rows.slice(0, limit);
	const mappedResult = { items: items.map((row) => this.mapRow(type, row)) };
	if (hasMore && items.length > 0) {
		const lastRow = items.at(-1);
		const lastOrderValue = lastRow[dbField];
		// Non-scalar order values collapse to "" in the cursor.
		mappedResult.nextCursor = encodeCursor(typeof lastOrderValue === "string" || typeof lastOrderValue === "number" ? String(lastOrderValue) : "", String(lastRow.id));
	}
	return mappedResult;
}
|
|
443
|
+
/**
 * Update content
 *
 * Only explicitly provided fields are written. `version` is incremented
 * atomically in SQL and `updated_at` is refreshed on every call.
 * @throws Error when the row does not exist or is soft-deleted.
 */
async update(type, id, input) {
	const tableName = getTableName(type);
	const updates = {
		updated_at: (/* @__PURE__ */ new Date()).toISOString(),
		// Raw fragment: bump happens in the database, not in JS.
		version: sql`version + 1`
	};
	if (input.status !== void 0) updates.status = input.status;
	if (input.slug !== void 0) updates.slug = input.slug;
	if (input.publishedAt !== void 0) updates.published_at = input.publishedAt;
	if (input.scheduledAt !== void 0) updates.scheduled_at = input.scheduledAt;
	if (input.authorId !== void 0) updates.author_id = input.authorId;
	if (input.primaryBylineId !== void 0) updates.primary_byline_id = input.primaryBylineId;
	if (input.data !== void 0 && typeof input.data === "object") {
		// Field columns come from `data`; system columns cannot be overridden here.
		for (const [key, value] of Object.entries(input.data)) if (!SYSTEM_COLUMNS.has(key)) updates[key] = serializeValue(value);
	}
	await this.db.updateTable(tableName).set(updates).where("id", "=", id).where("deleted_at", "is", null).execute();
	const updated = await this.findById(type, id);
	if (!updated) throw new Error("Content not found");
	return updated;
}
|
|
466
|
+
/**
|
|
467
|
+
* Delete content (soft delete - moves to trash)
|
|
468
|
+
*/
|
|
469
|
+
async delete(type, id) {
|
|
470
|
+
const tableName = getTableName(type);
|
|
471
|
+
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
472
|
+
return ((await sql`
|
|
473
|
+
UPDATE ${sql.ref(tableName)}
|
|
474
|
+
SET deleted_at = ${now}
|
|
475
|
+
WHERE id = ${id}
|
|
476
|
+
AND deleted_at IS NULL
|
|
477
|
+
`.execute(this.db)).numAffectedRows ?? 0n) > 0n;
|
|
478
|
+
}
|
|
479
|
+
/**
|
|
480
|
+
* Restore content from trash
|
|
481
|
+
*/
|
|
482
|
+
async restore(type, id) {
|
|
483
|
+
const tableName = getTableName(type);
|
|
484
|
+
return ((await sql`
|
|
485
|
+
UPDATE ${sql.ref(tableName)}
|
|
486
|
+
SET deleted_at = NULL
|
|
487
|
+
WHERE id = ${id}
|
|
488
|
+
AND deleted_at IS NOT NULL
|
|
489
|
+
`.execute(this.db)).numAffectedRows ?? 0n) > 0n;
|
|
490
|
+
}
|
|
491
|
+
/**
|
|
492
|
+
* Permanently delete content (cannot be undone)
|
|
493
|
+
*/
|
|
494
|
+
async permanentDelete(type, id) {
|
|
495
|
+
const tableName = getTableName(type);
|
|
496
|
+
return ((await sql`
|
|
497
|
+
DELETE FROM ${sql.ref(tableName)}
|
|
498
|
+
WHERE id = ${id}
|
|
499
|
+
`.execute(this.db)).numAffectedRows ?? 0n) > 0n;
|
|
500
|
+
}
|
|
501
|
+
/**
 * Find trashed content items
 *
 * Mirrors findMany() but selects only soft-deleted rows and adds a
 * `deletedAt` field to each mapped item. Same keyset pagination scheme.
 */
async findTrashed(type, options = {}) {
	const tableName = getTableName(type);
	// Page size is capped at 100 rows; defaults to 50.
	const limit = Math.min(options.limit || 50, 100);
	const orderField = options.orderBy?.field || "deletedAt";
	const orderDirection = options.orderBy?.direction || "desc";
	const dbField = this.mapOrderField(orderField);
	// Whitelist the direction so only "ASC"/"DESC" ever reach the query.
	const safeOrderDirection = orderDirection.toLowerCase() === "asc" ? "ASC" : "DESC";
	let query = this.db.selectFrom(tableName).selectAll().where("deleted_at", "is not", null);
	if (options.cursor) {
		const decoded = decodeCursor(options.cursor);
		if (decoded) {
			const { orderValue, id: cursorId } = decoded;
			// Keyset predicate: rows strictly past the cursor in (orderValue, id) order.
			if (safeOrderDirection === "DESC") query = query.where((eb) => eb.or([eb(dbField, "<", orderValue), eb.and([eb(dbField, "=", orderValue), eb("id", "<", cursorId)])]));
			else query = query.where((eb) => eb.or([eb(dbField, ">", orderValue), eb.and([eb(dbField, "=", orderValue), eb("id", ">", cursorId)])]));
		}
	}
	// "id" is the tiebreaker so ordering is total and cursors are stable.
	query = query.orderBy(dbField, safeOrderDirection === "ASC" ? "asc" : "desc").orderBy("id", safeOrderDirection === "ASC" ? "asc" : "desc").limit(limit + 1);
	const rows = await query.execute();
	const hasMore = rows.length > limit;
	const items = rows.slice(0, limit);
	const mappedResult = { items: items.map((row) => {
		const record = row;
		return {
			...this.mapRow(type, record),
			// Surface the trash timestamp; non-string values collapse to "".
			deletedAt: typeof record.deleted_at === "string" ? record.deleted_at : ""
		};
	}) };
	if (hasMore && items.length > 0) {
		const lastRow = items.at(-1);
		const lastOrderValue = lastRow[dbField];
		mappedResult.nextCursor = encodeCursor(typeof lastOrderValue === "string" || typeof lastOrderValue === "number" ? String(lastOrderValue) : "", String(lastRow.id));
	}
	return mappedResult;
}
|
|
538
|
+
/**
|
|
539
|
+
* Count trashed content items
|
|
540
|
+
*/
|
|
541
|
+
async countTrashed(type) {
|
|
542
|
+
const tableName = getTableName(type);
|
|
543
|
+
const result = await this.db.selectFrom(tableName).select((eb) => eb.fn.count("id").as("count")).where("deleted_at", "is not", null).executeTakeFirst();
|
|
544
|
+
return Number(result?.count || 0);
|
|
545
|
+
}
|
|
546
|
+
/**
|
|
547
|
+
* Count content items
|
|
548
|
+
*/
|
|
549
|
+
async count(type, where) {
|
|
550
|
+
const tableName = getTableName(type);
|
|
551
|
+
let query = this.db.selectFrom(tableName).select((eb) => eb.fn.count("id").as("count")).where("deleted_at", "is", null);
|
|
552
|
+
if (where?.status) query = query.where("status", "=", where.status);
|
|
553
|
+
if (where?.authorId) query = query.where("author_id", "=", where.authorId);
|
|
554
|
+
if (where?.locale) query = query.where("locale", "=", where.locale);
|
|
555
|
+
const result = await query.executeTakeFirst();
|
|
556
|
+
return Number(result?.count || 0);
|
|
557
|
+
}
|
|
558
|
+
/**
 * Aggregate counts for a collection: total (non-trashed), published, draft.
 * Uses one query with conditional SUMs instead of three separate COUNTs.
 */
async getStats(type) {
	const tableName = getTableName(type);
	const result = await this.db.selectFrom(tableName).select((eb) => [
		eb.fn.count("id").as("total"),
		eb.fn.sum(eb.case().when("status", "=", "published").then(1).else(0).end()).as("published"),
		eb.fn.sum(eb.case().when("status", "=", "draft").then(1).else(0).end()).as("draft")
	]).where("deleted_at", "is", null).executeTakeFirst();
	// SUM returns NULL on empty tables; Number(x || 0) normalizes to 0.
	return {
		total: Number(result?.total || 0),
		published: Number(result?.published || 0),
		draft: Number(result?.draft || 0)
	};
}
|
|
571
|
+
/**
 * Schedule content for future publishing
 *
 * Sets status to 'scheduled' and stores the scheduled publish time.
 * The content will be auto-published when the scheduled time is reached.
 * Already-published items keep status 'published' (scheduling draft changes).
 * @throws DinewayValidationError for an invalid or past date, or a missing item.
 */
async schedule(type, id, scheduledAt) {
	const tableName = getTableName(type);
	const now = (/* @__PURE__ */ new Date()).toISOString();
	const scheduledDate = new Date(scheduledAt);
	if (isNaN(scheduledDate.getTime())) throw new DinewayValidationError("Invalid scheduled date");
	if (scheduledDate <= /* @__PURE__ */ new Date()) throw new DinewayValidationError("Scheduled date must be in the future");
	const existing = await this.findById(type, id);
	if (!existing) throw new DinewayValidationError("Content item not found");
	// Published items stay published; everything else becomes 'scheduled'.
	const newStatus = existing.status === "published" ? "published" : "scheduled";
	await sql`
	UPDATE ${sql.ref(tableName)}
	SET status = ${newStatus},
	scheduled_at = ${scheduledAt},
	updated_at = ${now}
	WHERE id = ${id}
	AND deleted_at IS NULL
	`.execute(this.db);
	const updated = await this.findById(type, id);
	if (!updated) throw new Error("Content not found");
	return updated;
}
|
|
598
|
+
/**
|
|
599
|
+
* Unschedule content
|
|
600
|
+
*
|
|
601
|
+
* Clears the scheduled time. Published posts stay published;
|
|
602
|
+
* draft/scheduled posts revert to 'draft'.
|
|
603
|
+
*/
|
|
604
|
+
async unschedule(type, id) {
|
|
605
|
+
const tableName = getTableName(type);
|
|
606
|
+
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
607
|
+
const existing = await this.findById(type, id);
|
|
608
|
+
if (!existing) throw new DinewayValidationError("Content item not found");
|
|
609
|
+
const newStatus = existing.status === "published" ? "published" : "draft";
|
|
610
|
+
await sql`
|
|
611
|
+
UPDATE ${sql.ref(tableName)}
|
|
612
|
+
SET status = ${newStatus},
|
|
613
|
+
scheduled_at = NULL,
|
|
614
|
+
updated_at = ${now}
|
|
615
|
+
WHERE id = ${id}
|
|
616
|
+
AND scheduled_at IS NOT NULL
|
|
617
|
+
AND deleted_at IS NULL
|
|
618
|
+
`.execute(this.db);
|
|
619
|
+
const updated = await this.findById(type, id);
|
|
620
|
+
if (!updated) throw new Error("Content not found");
|
|
621
|
+
return updated;
|
|
622
|
+
}
|
|
623
|
+
/**
|
|
624
|
+
* Find content that is ready to be published
|
|
625
|
+
*
|
|
626
|
+
* Returns all content where scheduled_at <= now, regardless of status.
|
|
627
|
+
* This covers both draft-scheduled posts (status='scheduled') and
|
|
628
|
+
* published posts with scheduled draft changes (status='published').
|
|
629
|
+
*/
|
|
630
|
+
async findReadyToPublish(type) {
|
|
631
|
+
const tableName = getTableName(type);
|
|
632
|
+
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
633
|
+
return (await sql`
|
|
634
|
+
SELECT * FROM ${sql.ref(tableName)}
|
|
635
|
+
WHERE scheduled_at IS NOT NULL
|
|
636
|
+
AND scheduled_at <= ${now}
|
|
637
|
+
AND deleted_at IS NULL
|
|
638
|
+
ORDER BY scheduled_at ASC
|
|
639
|
+
`.execute(this.db)).rows.map((row) => this.mapRow(type, row));
|
|
640
|
+
}
|
|
641
|
+
/**
|
|
642
|
+
* Find all translations in a translation group
|
|
643
|
+
*/
|
|
644
|
+
async findTranslations(type, translationGroup) {
|
|
645
|
+
const tableName = getTableName(type);
|
|
646
|
+
return (await sql`
|
|
647
|
+
SELECT * FROM ${sql.ref(tableName)}
|
|
648
|
+
WHERE translation_group = ${translationGroup}
|
|
649
|
+
AND deleted_at IS NULL
|
|
650
|
+
ORDER BY locale ASC
|
|
651
|
+
`.execute(this.db)).rows.map((row) => this.mapRow(type, row));
|
|
652
|
+
}
|
|
653
|
+
/**
|
|
654
|
+
* Publish the current draft
|
|
655
|
+
*
|
|
656
|
+
* Promotes draft_revision_id to live_revision_id and clears draft pointer.
|
|
657
|
+
* Syncs the draft revision's data into the content table columns so the
|
|
658
|
+
* content table always reflects the published version.
|
|
659
|
+
* If no draft revision exists, creates one from current data and publishes it.
|
|
660
|
+
*/
|
|
661
|
+
async publish(type, id) {
|
|
662
|
+
const tableName = getTableName(type);
|
|
663
|
+
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
664
|
+
const existing = await this.findById(type, id);
|
|
665
|
+
if (!existing) throw new DinewayValidationError("Content item not found");
|
|
666
|
+
const revisionRepo = new RevisionRepository(this.db);
|
|
667
|
+
let revisionToPublish = existing.draftRevisionId || existing.liveRevisionId;
|
|
668
|
+
if (!revisionToPublish) revisionToPublish = (await revisionRepo.create({
|
|
669
|
+
collection: type,
|
|
670
|
+
entryId: id,
|
|
671
|
+
data: existing.data
|
|
672
|
+
})).id;
|
|
673
|
+
const revision = await revisionRepo.findById(revisionToPublish);
|
|
674
|
+
if (revision) {
|
|
675
|
+
await this.syncDataColumns(type, id, revision.data);
|
|
676
|
+
if (typeof revision.data._slug === "string") await sql`
|
|
677
|
+
UPDATE ${sql.ref(tableName)}
|
|
678
|
+
SET slug = ${revision.data._slug}
|
|
679
|
+
WHERE id = ${id}
|
|
680
|
+
`.execute(this.db);
|
|
681
|
+
}
|
|
682
|
+
await sql`
|
|
683
|
+
UPDATE ${sql.ref(tableName)}
|
|
684
|
+
SET live_revision_id = ${revisionToPublish},
|
|
685
|
+
draft_revision_id = NULL,
|
|
686
|
+
status = 'published',
|
|
687
|
+
scheduled_at = NULL,
|
|
688
|
+
published_at = COALESCE(published_at, ${now}),
|
|
689
|
+
updated_at = ${now}
|
|
690
|
+
WHERE id = ${id}
|
|
691
|
+
AND deleted_at IS NULL
|
|
692
|
+
`.execute(this.db);
|
|
693
|
+
const updated = await this.findById(type, id);
|
|
694
|
+
if (!updated) throw new Error("Content not found");
|
|
695
|
+
return updated;
|
|
696
|
+
}
|
|
697
|
+
/**
|
|
698
|
+
* Unpublish content
|
|
699
|
+
*
|
|
700
|
+
* Removes live pointer but preserves draft. If no draft exists,
|
|
701
|
+
* creates one from the live version so the content isn't lost.
|
|
702
|
+
*/
|
|
703
|
+
async unpublish(type, id) {
|
|
704
|
+
const tableName = getTableName(type);
|
|
705
|
+
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
706
|
+
const existing = await this.findById(type, id);
|
|
707
|
+
if (!existing) throw new DinewayValidationError("Content item not found");
|
|
708
|
+
if (!existing.draftRevisionId && existing.liveRevisionId) {
|
|
709
|
+
const revisionRepo = new RevisionRepository(this.db);
|
|
710
|
+
const liveRevision = await revisionRepo.findById(existing.liveRevisionId);
|
|
711
|
+
if (liveRevision) {
|
|
712
|
+
const draft = await revisionRepo.create({
|
|
713
|
+
collection: type,
|
|
714
|
+
entryId: id,
|
|
715
|
+
data: liveRevision.data
|
|
716
|
+
});
|
|
717
|
+
await sql`
|
|
718
|
+
UPDATE ${sql.ref(tableName)}
|
|
719
|
+
SET draft_revision_id = ${draft.id}
|
|
720
|
+
WHERE id = ${id}
|
|
721
|
+
`.execute(this.db);
|
|
722
|
+
}
|
|
723
|
+
}
|
|
724
|
+
await sql`
|
|
725
|
+
UPDATE ${sql.ref(tableName)}
|
|
726
|
+
SET live_revision_id = NULL,
|
|
727
|
+
status = 'draft',
|
|
728
|
+
updated_at = ${now}
|
|
729
|
+
WHERE id = ${id}
|
|
730
|
+
AND deleted_at IS NULL
|
|
731
|
+
`.execute(this.db);
|
|
732
|
+
const updated = await this.findById(type, id);
|
|
733
|
+
if (!updated) throw new Error("Content not found");
|
|
734
|
+
return updated;
|
|
735
|
+
}
|
|
736
|
+
/**
|
|
737
|
+
* Discard pending draft changes
|
|
738
|
+
*
|
|
739
|
+
* Clears draft_revision_id. The content table columns already hold the
|
|
740
|
+
* published version, so no data sync is needed.
|
|
741
|
+
*/
|
|
742
|
+
async discardDraft(type, id) {
|
|
743
|
+
const tableName = getTableName(type);
|
|
744
|
+
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
745
|
+
const existing = await this.findById(type, id);
|
|
746
|
+
if (!existing) throw new DinewayValidationError("Content item not found");
|
|
747
|
+
if (!existing.draftRevisionId) return existing;
|
|
748
|
+
await sql`
|
|
749
|
+
UPDATE ${sql.ref(tableName)}
|
|
750
|
+
SET draft_revision_id = NULL,
|
|
751
|
+
updated_at = ${now}
|
|
752
|
+
WHERE id = ${id}
|
|
753
|
+
AND deleted_at IS NULL
|
|
754
|
+
`.execute(this.db);
|
|
755
|
+
const updated = await this.findById(type, id);
|
|
756
|
+
if (!updated) throw new Error("Content not found");
|
|
757
|
+
return updated;
|
|
758
|
+
}
|
|
759
|
+
/**
|
|
760
|
+
* Sync data columns in the content table from a data object.
|
|
761
|
+
* Used to promote revision data into the content table on publish.
|
|
762
|
+
* Keys starting with _ are revision metadata (e.g. _slug) and are skipped.
|
|
763
|
+
*/
|
|
764
|
+
async syncDataColumns(type, id, data) {
|
|
765
|
+
const tableName = getTableName(type);
|
|
766
|
+
const updates = {};
|
|
767
|
+
for (const [key, value] of Object.entries(data)) {
|
|
768
|
+
if (SYSTEM_COLUMNS.has(key)) continue;
|
|
769
|
+
if (key.startsWith("_")) continue;
|
|
770
|
+
updates[key] = serializeValue(value);
|
|
771
|
+
}
|
|
772
|
+
if (Object.keys(updates).length === 0) return;
|
|
773
|
+
await this.db.updateTable(tableName).set(updates).where("id", "=", id).execute();
|
|
774
|
+
}
|
|
775
|
+
/**
|
|
776
|
+
* Count content items with a pending schedule.
|
|
777
|
+
* Includes both draft-scheduled (status='scheduled') and published
|
|
778
|
+
* posts with scheduled draft changes (status='published', scheduled_at set).
|
|
779
|
+
*/
|
|
780
|
+
async countScheduled(type) {
|
|
781
|
+
const tableName = getTableName(type);
|
|
782
|
+
const result = await sql`
|
|
783
|
+
SELECT COUNT(id) as count FROM ${sql.ref(tableName)}
|
|
784
|
+
WHERE scheduled_at IS NOT NULL
|
|
785
|
+
AND deleted_at IS NULL
|
|
786
|
+
`.execute(this.db);
|
|
787
|
+
return Number(result.rows[0]?.count || 0);
|
|
788
|
+
}
|
|
789
|
+
/**
|
|
790
|
+
* Map database row to ContentItem
|
|
791
|
+
* Extracts system columns and puts content fields in data
|
|
792
|
+
* Excludes null values from data to match input semantics
|
|
793
|
+
*/
|
|
794
|
+
mapRow(type, row) {
|
|
795
|
+
const data = {};
|
|
796
|
+
for (const [key, value] of Object.entries(row)) if (!SYSTEM_COLUMNS.has(key) && value !== null) data[key] = deserializeValue(value);
|
|
797
|
+
return {
|
|
798
|
+
id: row.id,
|
|
799
|
+
type,
|
|
800
|
+
slug: row.slug,
|
|
801
|
+
status: row.status,
|
|
802
|
+
data,
|
|
803
|
+
authorId: row.author_id,
|
|
804
|
+
primaryBylineId: row.primary_byline_id ?? null,
|
|
805
|
+
createdAt: row.created_at,
|
|
806
|
+
updatedAt: row.updated_at,
|
|
807
|
+
publishedAt: row.published_at,
|
|
808
|
+
scheduledAt: row.scheduled_at,
|
|
809
|
+
liveRevisionId: row.live_revision_id ?? null,
|
|
810
|
+
draftRevisionId: row.draft_revision_id ?? null,
|
|
811
|
+
version: typeof row.version === "number" ? row.version : 1,
|
|
812
|
+
locale: row.locale ?? null,
|
|
813
|
+
translationGroup: row.translation_group ?? null
|
|
814
|
+
};
|
|
815
|
+
}
|
|
816
|
+
/**
|
|
817
|
+
* Map order field names to database columns.
|
|
818
|
+
* Only allows known fields to prevent column enumeration via crafted orderBy values.
|
|
819
|
+
*/
|
|
820
|
+
mapOrderField(field) {
|
|
821
|
+
const mapped = {
|
|
822
|
+
createdAt: "created_at",
|
|
823
|
+
updatedAt: "updated_at",
|
|
824
|
+
publishedAt: "published_at",
|
|
825
|
+
scheduledAt: "scheduled_at",
|
|
826
|
+
deletedAt: "deleted_at",
|
|
827
|
+
title: "title",
|
|
828
|
+
slug: "slug"
|
|
829
|
+
}[field];
|
|
830
|
+
if (!mapped) throw new DinewayValidationError(`Invalid order field: ${field}`);
|
|
831
|
+
return mapped;
|
|
832
|
+
}
|
|
833
|
+
};
|
|
834
|
+
|
|
835
|
+
//#endregion
|
|
836
|
+
export { slugify as i, content_exports as n, RevisionRepository as r, ContentRepository as t };
|