@elek-io/core 0.15.0 → 0.15.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/browser/index.browser.d.ts +13520 -12616
- package/dist/browser/index.browser.js +1 -1150
- package/dist/browser/index.browser.js.map +1 -1
- package/dist/cli/index.cli.js +4004 -0
- package/dist/node/chunk-Bp6m_JJh.js +13 -0
- package/dist/node/index.node.d.ts +13518 -12616
- package/dist/node/index.node.js +3558 -4319
- package/dist/node/index.node.js.map +1 -1
- package/package.json +27 -20
|
@@ -0,0 +1,4004 @@
|
|
|
1
|
+
#! /usr/bin/env node
|
|
2
|
+
import { Command } from "@commander-js/extra-typings";
|
|
3
|
+
import Path from "path";
|
|
4
|
+
import Fs from "fs-extra";
|
|
5
|
+
import { build } from "tsdown";
|
|
6
|
+
import CodeBlockWriter from "code-block-writer";
|
|
7
|
+
import assert from "assert";
|
|
8
|
+
import chokidar from "chokidar";
|
|
9
|
+
import { serve } from "@hono/node-server";
|
|
10
|
+
import { OpenAPIHono, createRoute, z } from "@hono/zod-openapi";
|
|
11
|
+
import { requestId } from "hono/request-id";
|
|
12
|
+
import { createMiddleware } from "hono/factory";
|
|
13
|
+
import { cors } from "hono/cors";
|
|
14
|
+
import { trimTrailingSlash } from "hono/trailing-slash";
|
|
15
|
+
import { z as z$1 } from "zod";
|
|
16
|
+
import { Scalar } from "@scalar/hono-api-reference";
|
|
17
|
+
import Os from "os";
|
|
18
|
+
import { execFile } from "child_process";
|
|
19
|
+
import mime from "mime";
|
|
20
|
+
import slugify from "@sindresorhus/slugify";
|
|
21
|
+
import { v4 } from "uuid";
|
|
22
|
+
import { GitProcess } from "dugite";
|
|
23
|
+
import PQueue from "p-queue";
|
|
24
|
+
import { createLogger, format, transports } from "winston";
|
|
25
|
+
import DailyRotateFile from "winston-daily-rotate-file";
|
|
26
|
+
import Semver from "semver";
|
|
27
|
+
|
|
28
|
+
//#region rolldown:runtime
// Bundler-injected runtime helpers.
// `__export` builds a namespace object whose properties are lazy, enumerable
// getters over the supplied accessor map (each value in `all` is a function
// returning the exported binding).
var __defProp = Object.defineProperty;
var __export = (all) => {
	const target = {};
	for (const key in all) {
		__defProp(target, key, {
			get: all[key],
			enumerable: true
		});
	}
	return target;
};
|
|
38
|
+
|
|
39
|
+
//#endregion
|
|
40
|
+
//#region package.json
// Inlined copy of the package manifest (embedded by the bundler at build
// time) so the CLI can report its own name/version without reading
// package.json from disk at runtime.
var package_default = {
	name: "@elek-io/core",
	version: "0.15.2",
	description: "Handles core functionality of elek.io Projects like file IO and version control.",
	homepage: "https://elek.io",
	repository: "https://github.com/elek-io/core",
	bugs: { "url": "https://github.com/elek-io/core/issues" },
	type: "module",
	bin: { "elek": "./dist/cli/index.cli.js" },
	files: ["dist/node", "dist/browser"],
	exports: { ".": {
		"node": { "import": {
			"types": "./dist/node/index.node.d.ts",
			"default": "./dist/node/index.node.js"
		} },
		"import": {
			"types": "./dist/browser/index.browser.d.ts",
			"default": "./dist/browser/index.browser.js"
		}
	} },
	pnpm: { "overrides": {} },
	scripts: {
		"lint": "eslint",
		"check-types": "tsc --noEmit",
		"check-format": "prettier --check . || exit 0",
		"format": "prettier --write .",
		"dev": "vitest",
		"test": "vitest run",
		"coverage": "vitest run --coverage",
		"build": "tsdown",
		"release": "changeset publish"
	},
	dependencies: {
		"@commander-js/extra-typings": "14.0.0",
		"@hono/node-server": "1.19.5",
		"@hono/zod-openapi": "1.1.4",
		"@scalar/hono-api-reference": "0.9.22",
		"@sindresorhus/slugify": "3.0.0",
		"chokidar": "4.0.3",
		"code-block-writer": "13.0.3",
		"commander": "14.0.2",
		"fs-extra": "11.3.2",
		"hono": "4.10.4",
		"mime": "4.1.0",
		"p-queue": "9.0.0",
		"semver": "7.7.3",
		"tsdown": "0.15.12",
		"uuid": "13.0.0",
		"winston": "3.18.3",
		"winston-daily-rotate-file": "5.0.0",
		"zod": "4.1.12"
	},
	devDependencies: {
		"@changesets/cli": "2.29.7",
		"@eslint/js": "9.38.0",
		"@faker-js/faker": "10.1.0",
		"@tsconfig/node22": "22.0.2",
		"@tsconfig/strictest": "2.0.7",
		"@types/fs-extra": "11.0.4",
		"@types/node": "22.18.13",
		"@types/semver": "7.7.1",
		"@vitest/coverage-v8": "4.0.5",
		"eslint": "9.38.0",
		"eslint-config-prettier": "10.1.8",
		"globals": "16.4.0",
		"jiti": "2.6.1",
		"prettier": "3.6.2",
		"typescript": "5.9.3",
		"typescript-eslint": "8.46.2",
		"vitest": "4.0.5"
	},
	peerDependencies: { "dugite": "2.7.1" }
};
|
|
114
|
+
|
|
115
|
+
//#endregion
|
|
116
|
+
//#region src/cli/exportAction.ts
/**
 * Exports the given Projects — including their Assets, Collections and
 * Entries — as pretty-printed JSON into `outDir`.
 *
 * @param {object} props
 * @param {string} props.outDir - Target directory; created if missing.
 * @param {"all" | string[]} props.projects - "all" exports every local
 *   Project, otherwise the given Project IDs are read individually.
 * @param {object} props.options - `separate: true` writes one
 *   `project-<id>.json` per Project instead of a combined `projects.json`.
 */
async function exportProjects({ outDir, projects, options }) {
	const projectsToExport = [];
	const resolvedOutDir = Path.resolve(outDir);
	await Fs.ensureDir(resolvedOutDir);
	const content = {};
	if (projects === "all") {
		projectsToExport.push(...(await core.projects.list({ limit: 0 })).list);
	} else {
		for (const projectId of projects) {
			projectsToExport.push(await core.projects.read({ id: projectId }));
		}
	}
	for (const project of projectsToExport) {
		const assets = (await core.assets.list({
			projectId: project.id,
			limit: 0
		})).list;
		// Key each Asset by its ID. Direct assignment instead of re-spreading
		// the accumulator on every iteration avoids accidental O(n^2) work.
		const assetContent = {};
		for (const asset of assets) assetContent[asset.id] = { ...asset };
		const collectionContent = {};
		const collections = (await core.collections.list({
			projectId: project.id,
			limit: 0
		})).list;
		for (const collection of collections) {
			const entryContent = {};
			const entries = (await core.entries.list({
				projectId: project.id,
				collectionId: collection.id,
				limit: 0
			})).list;
			for (const entry of entries) entryContent[entry.id] = { ...entry };
			collectionContent[collection.id] = {
				...collection,
				entries: entryContent
			};
		}
		content[project.id] = {
			...project,
			assets: assetContent,
			collections: collectionContent
		};
	}
	if (options.separate === true) {
		for (const project of projectsToExport) {
			await Fs.writeFile(Path.join(resolvedOutDir, `project-${project.id}.json`), JSON.stringify(content[project.id], null, 2));
		}
	} else {
		await Fs.writeFile(Path.join(resolvedOutDir, "projects.json"), JSON.stringify(content, null, 2));
	}
}
|
|
170
|
+
/**
 * CLI action: export Projects once, then (with `options.watch`) keep
 * re-exporting whenever the watched Project files change.
 */
const exportAction = async ({ outDir, projects, options }) => {
	await exportProjects({
		outDir,
		projects,
		options
	});
	if (options.watch === true) {
		core.logger.info({
			source: "core",
			message: "Watching for changes to export Projects"
		});
		watchProjects().on("all", (event, path) => {
			core.logger.info({
				source: "core",
				message: `Re-Exporting Projects due to ${event} on "${path}"`
			});
			// exportProjects is async; previously this promise floated, so a
			// failed re-export raised an unhandled rejection (fatal on modern
			// Node). Log the failure and keep watching instead.
			exportProjects({
				outDir,
				projects,
				options
			}).catch((error) => {
				core.logger.error({
					source: "core",
					message: `Failed to re-export Projects: ${error}`
				});
			});
		});
	}
};
|
|
194
|
+
|
|
195
|
+
//#endregion
|
|
196
|
+
//#region src/cli/startApiAction.ts
/**
 * CLI action that boots the local HTTP API on the given port by
 * delegating to the Core API service.
 */
const startApiAction = ({ port }) => core.api.start(port);
|
|
200
|
+
|
|
201
|
+
//#endregion
|
|
202
|
+
//#region src/cli/generateApiClientAction.ts
/**
 * API Client generator
 *
 * Generates an API client with full type safety in given folder
 * based on the locally available Projects, Collections and Entries.
 * Uses a generated schema based on the field definitions
 * of Collections to provide correct types for available Entries.
 *
 * @example
 * Usage: Import the generated client and use it to access the local content API
 *
 * ```ts
 * import { apiClient } from './.elek.io/client.js';
 *
 * const client = await apiClient({
 *   baseUrl: 'http://localhost:31310',
 *   apiKey: '<token>'
 * }).content.v1;
 *
 * const entries = await client
 *   .projects['d9920ad7-07b8-41c4-84f7-5d6babf0f800']
 *   .collections['7fc70100-82b3-41f8-b4de-705a84b0a95d']
 *   .entries.list({
 *     limit: 10,
 *   })
 *
 * console.log(entries);
 * ```
 */
async function generateApiClient(outFile) {
	const writer = new CodeBlockWriter({
		newLine: "\n",
		indentNumberOfSpaces: 2,
		useTabs: false,
		useSingleQuote: true
	});
	// Preamble of the generated TypeScript file: imports plus the shared
	// schemas/types for list() props and the client constructor props.
	writer.writeLine(`import { paginatedListOf, getEntrySchemaFromFieldDefinitions } from '@elek-io/core';`);
	writer.writeLine(`import { z } from 'zod';`);
	writer.blankLine();
	writer.writeLine(`const listSchema = z.object({ limit: z.number().optional(), offset: z.number().optional() }).optional();`);
	writer.writeLine(`type ListProps = z.infer<typeof listSchema>;`);
	writer.blankLine();
	writer.writeLine(`const apiClientSchema = z.object({ baseUrl: z.url(), apiKey: z.string() });`);
	writer.writeLine(`type ApiClientProps = z.infer<typeof apiClientSchema>;`);
	writer.blankLine();
	writer.writeLine(`/**`);
	writer.writeLine(` * elek.io Client`);
	writer.writeLine(` * `);
	writer.writeLine(` * Used to access elek.io APIs.`);
	writer.writeLine(` */`);
	writer.writeLine(`export function apiClient({ baseUrl, apiKey }: ApiClientProps) {`);
	writer.indent(1).write(`apiClientSchema.parse({ baseUrl, apiKey });`).newLine();
	writer.blankLine();
	// Emit the nested `content.v1.projects` object; one keyed entry per
	// local Project is written by writeProjectsObject.
	writer.indent(1).write(`return {`).newLine();
	writer.indent(2).write(`content: {`).newLine();
	writer.indent(3).write(`v1: {`).newLine();
	writer.indent(4).write(`projects: {`).newLine();
	await writeProjectsObject(writer);
	writer.indent(4).write(`}`).newLine();
	writer.indent(3).write(`}`).newLine();
	writer.indent(2).write(`}`).newLine();
	writer.indent(1).write(`}`).newLine();
	writer.writeLine(`}`);
	// Persist the generated source and report where it went.
	await Fs.writeFile(outFile, writer.toString());
	core.logger.info({
		source: "core",
		message: `Generated API Client in "${outFile}"`
	});
}
|
|
272
|
+
/**
 * Writes one keyed object entry per local Project into the generated
 * client, each exposing a `collections` accessor.
 */
async function writeProjectsObject(writer) {
	const { list } = await core.projects.list({
		limit: 0,
		offset: 0
	});
	for (const project of list) {
		assert(project, "Project not found by index");
		writer.indent(1).quote(project.id).write(`: {`).newLine();
		writer.indent(2).write(`collections: {`).newLine();
		await writeCollectionsObject(writer, project);
		writer.indent(2).write(`},`).newLine();
		writer.indent(1).write(`},`).newLine();
	}
}
|
|
287
|
+
/**
 * Writes one keyed object entry per Collection of the given Project,
 * each exposing an `entries` accessor in the generated client.
 */
async function writeCollectionsObject(writer, project) {
	const { list } = await core.collections.list({
		projectId: project.id,
		limit: 0,
		offset: 0
	});
	for (const collection of list) {
		assert(collection, "Collection not found by index");
		writer.indent(3).quote(collection.id).write(`: {`).newLine();
		writer.indent(4).write(`entries: {`).newLine();
		writeEntriesObject(writer, project, collection);
		writer.indent(4).write(`},`).newLine();
		writer.indent(3).write(`},`).newLine();
	}
}
|
|
303
|
+
/**
 * Writes the `list()` method for a Collection's Entries into the generated
 * client: it validates the props, builds an Entry schema from the
 * Collection's field definitions (inlined as JSON), fetches the paginated
 * list from the local API and parses the response.
 */
function writeEntriesObject(writer, project, collection) {
	writer.indent(5).write(`list: async (props?: ListProps) => {`).newLine();
	writer.indent(6).write(`listSchema.parse(props);`).newLine();
	writer.indent(6).write(`const entrySchema = paginatedListOf(getEntrySchemaFromFieldDefinitions(`).newLine();
	// Temporarily raise the writer's base indentation so the multi-line
	// JSON.stringify output lines up with the surrounding generated code,
	// then reset it back to 0.
	writer.setIndentationLevel(6);
	writer.indent(() => {
		writer.write(JSON.stringify(collection.fieldDefinitions, null, 2)).newLine();
	});
	writer.setIndentationLevel(0);
	writer.indent(6).write(`));`).newLine();
	writer.blankLine();
	// Emits the fetch call that assigns the response body to `entries`.
	writeFetch(writer, `/content/v1/projects/${project.id}/collections/${collection.id}/entries`);
	writer.blankLine();
	writer.indent(6).write(`return entrySchema.parse(entries);`).newLine();
	writer.indent(5).write(`},`).newLine();
}
|
|
319
|
+
/**
 * Emits an authenticated JSON `fetch` call to `${baseUrl}<to>` into the
 * generated client and stores the parsed response body in `entries`.
 * The literal "${baseUrl}" ends up inside a template literal of the
 * generated code and is interpolated there, not here.
 */
function writeFetch(writer, to, method = "GET") {
	const writeHeaders = () => {
		writer.writeLine("'Authorization': `Bearer ${apiKey}`,");
		writer.writeLine("'Content-Type': 'application/json'");
	};
	const writeOptions = () => {
		writer.writeLine(`method: '${method}',`);
		writer.write(`headers: `).block(writeHeaders).newLine();
	};
	writer.write("const response = await fetch(`${baseUrl}").write(to).write("`, ");
	writer.block(writeOptions).write(");").newLine();
	writer.writeLine("const entries = await response.json();");
}
|
|
329
|
+
/**
 * Generates the typed API client as TypeScript in `outDir` and, when
 * `language` is "js", transpiles it in place via tsdown (emitting JS,
 * d.ts and sourcemaps) before removing the intermediate client.ts.
 */
async function generateApiClientAs({ outDir, language, format: format$1, target }) {
	const resolvedOutDir = Path.resolve(outDir);
	await Fs.ensureDir(resolvedOutDir);
	const outFileTs = Path.join(resolvedOutDir, "client.ts");
	await generateApiClient(outFileTs);
	if (language !== "js") return;
	// tsdown expects POSIX-style entry paths, so convert platform separators.
	const entry = outFileTs.split(Path.sep).join(Path.posix.sep);
	await build({
		config: false,
		external: ["@elek-io/core", "zod"],
		entry,
		outDir: resolvedOutDir,
		format: format$1,
		target,
		sourcemap: true,
		clean: false,
		dts: true,
		minify: true
	});
	await Fs.remove(outFileTs);
}
|
|
350
|
+
/**
 * CLI action: generate the API client once, then (with `options.watch`)
 * regenerate it whenever the watched Project files change.
 */
const generateApiClientAction = async ({ outDir, language, format: format$1, target, options }) => {
	await generateApiClientAs({
		outDir,
		language,
		format: format$1,
		target,
		options
	});
	if (options.watch === true) {
		core.logger.info({
			source: "core",
			message: "Watching for changes to regenerate the API Client"
		});
		watchProjects().on("all", (event, path) => {
			core.logger.info({
				source: "core",
				message: `Regenerating API Client due to ${event} on "${path}"`
			});
			// generateApiClientAs is async; previously this promise floated, so
			// a failed regeneration raised an unhandled rejection (fatal on
			// modern Node). Log the failure and keep watching instead.
			generateApiClientAs({
				outDir,
				language,
				format: format$1,
				target,
				options
			}).catch((error) => {
				core.logger.error({
					source: "core",
					message: `Failed to regenerate the API Client: ${error}`
				});
			});
		});
	}
};
|
|
378
|
+
|
|
379
|
+
//#endregion
|
|
380
|
+
//#region src/api/middleware/requestResponseLogger.ts
/**
 * Middleware that logs the details of each request and response:
 * one entry when the request arrives and one (level chosen by status
 * class) once the response is produced, including the elapsed time.
 */
const requestResponseLogger = createMiddleware(async (c, next) => {
	const { method, url } = c.req;
	const requestId$1 = c.get("requestId");
	c.var.logService.info({
		source: "core",
		// Fixed typo in the log message: "Recieved" -> "Received"
		message: `Received API request "${method} ${url}" with requestId ${requestId$1}`
	});
	const start = Date.now();
	await next();
	const durationMs = Date.now() - start;
	const statusCode = c.res.status.toString();
	const resultLog = {
		source: "core",
		message: `Response for API request "${method} ${url}" with requestId ${requestId$1} and status code ${statusCode} in ${durationMs}ms`
	};
	// Map status class to log level: 2xx -> info, 3xx -> warn, 4xx/5xx -> error.
	if (statusCode.startsWith("2")) c.var.logService.info(resultLog);
	else if (statusCode.startsWith("3")) c.var.logService.warn(resultLog);
	else if (statusCode.startsWith("4") || statusCode.startsWith("5")) c.var.logService.error(resultLog);
});
|
|
403
|
+
|
|
404
|
+
//#endregion
|
|
405
|
+
//#region src/api/lib/util.ts
/**
 * Creates a new OpenAPIHono router with default settings
 *
 * The defaultHook runs after request validation: on failure it
 * short-circuits with a 422 JSON payload carrying the zod issues.
 */
function createRouter() {
	return new OpenAPIHono({ defaultHook: (result, c) => {
		if (!result.success) return c.json({
			success: result.success,
			error: {
				name: result.error.name,
				issues: result.error.issues
			}
		}, 422);
		// NOTE(review): on success this returns the raw validation result
		// rather than a Response — presumably ignored by the hook contract;
		// confirm against @hono/zod-openapi's defaultHook semantics.
		return result;
	} });
}
|
|
421
|
+
/**
 * Creates a new OpenAPIHono instance, injects services into context and adds error handling
 */
function createApi(logService, projectService, collectionService, entryService, assetService) {
	const api = createRouter();
	// Global middleware, registered in order: request IDs, trailing-slash
	// trimming, CORS, service injection into the context, request logging.
	api.use(requestId());
	api.use(trimTrailingSlash());
	api.use(cors({ origin: ["http://localhost"] }));
	api.use(createMiddleware((c, next) => {
		c.set("logService", logService);
		c.set("projectService", projectService);
		c.set("collectionService", collectionService);
		c.set("entryService", entryService);
		c.set("assetService", assetService);
		return next();
	}));
	api.use(requestResponseLogger);
	api.notFound((c) => c.json({ message: `Not Found - ${c.req.path}` }, 404));
	api.onError((err, c) => {
		// Prefer the error's own HTTP status; fall back to 500 when the
		// current status is still the default 200.
		const currentStatus = "status" in err ? err.status : c.newResponse(null).status;
		const statusCode = currentStatus !== 200 ? currentStatus : 500;
		return c.json({
			message: err.message,
			stack: err.stack
		}, statusCode);
	});
	return api;
}
|
|
447
|
+
|
|
448
|
+
//#endregion
|
|
449
|
+
//#region src/schema/baseSchema.ts
/**
 * All currently supported, BCP 47 compliant language tags
 *
 * The support depends on the tools and libraries we use.
 * We can't support a given language, if there is no support
 * for it from used third parties. Currently, to check if a language
 * tag can be added to this list, it needs to be supported by:
 * - DeepL translation API
 *
 * @see https://www.deepl.com/docs-api/other-functions/listing-supported-languages/
 */
const supportedLanguageSchema = z.enum([
	"bg",
	"cs",
	"da",
	"de",
	"el",
	"en",
	"es",
	"et",
	"fi",
	"fr",
	"hu",
	"it",
	"ja",
	"lt",
	"lv",
	"nl",
	"pl",
	"pt",
	"ro",
	"ru",
	"sk",
	"sl",
	"sv",
	"zh"
]);
// Icon identifiers selectable in the UI
const supportedIconSchema = z.enum([
	"home",
	"plus",
	"foobar"
]);
// Discriminator for every kind of elek.io object
const objectTypeSchema = z.enum([
	"project",
	"asset",
	"collection",
	"entry",
	"value",
	"sharedValue"
]);
// Log levels, ordered from most to least severe
const logLevelSchema = z.enum([
	"error",
	"warn",
	"info",
	"debug"
]);
// Version string; format is not enforced by this schema
const versionSchema = z.string();
// UUID string; "shared.invalidUuid" is the custom error issued on mismatch
const uuidSchema = z.uuid("shared.invalidUuid");
/**
 * A record that can be used to translate a string value into all supported languages
 */
const translatableStringSchema = z.partialRecord(supportedLanguageSchema, z.string().trim().min(1, "shared.translatableStringRequired"));
/**
 * A record that can be used to translate a number value into all supported languages
 */
const translatableNumberSchema = z.partialRecord(supportedLanguageSchema, z.number({ error: (error) => error.input === void 0 ? "shared.translatableNumberRequired" : "shared.translatableNumberNotANumber" }));
/**
 * A record that can be used to translate a boolean value into all supported languages
 */
const translatableBooleanSchema = z.partialRecord(supportedLanguageSchema, z.boolean({ error: (error) => error.input === void 0 ? "shared.translatableBooleanRequired" : "shared.translatableBooleanNotABoolean" }));
/**
 * A record mapping each supported language to an array of the given schema
 */
function translatableArrayOf(schema) {
	return z.partialRecord(supportedLanguageSchema, z.array(schema));
}
|
|
523
|
+
|
|
524
|
+
//#endregion
|
|
525
|
+
//#region src/schema/fileSchema.ts
|
|
526
|
+
/**
|
|
527
|
+
* A basic file structure every elek.io file on disk has to follow
|
|
528
|
+
*/
|
|
529
|
+
const baseFileSchema = z.object({
|
|
530
|
+
objectType: objectTypeSchema.readonly(),
|
|
531
|
+
id: uuidSchema.readonly(),
|
|
532
|
+
created: z.string().datetime().readonly(),
|
|
533
|
+
updated: z.string().datetime().nullable()
|
|
534
|
+
});
|
|
535
|
+
const fileReferenceSchema = z.object({
|
|
536
|
+
id: uuidSchema,
|
|
537
|
+
extension: z.string().optional()
|
|
538
|
+
});
|
|
539
|
+
|
|
540
|
+
//#endregion
|
|
541
|
+
//#region src/schema/gitSchema.ts
/**
 * Signature git uses to identify users
 */
const gitSignatureSchema = z.object({
	name: z.string(),
	email: z.string().email()
});
// Structured commit message: the method applied (create/update/delete/upgrade)
// and a reference to the affected object
const gitMessageSchema = z.object({
	method: z.enum([
		"create",
		"update",
		"delete",
		"upgrade"
	]),
	reference: z.object({
		objectType: objectTypeSchema,
		id: uuidSchema,
		// only set for objects nested inside a Collection
		collectionId: uuidSchema.optional()
	})
});
// An annotated git tag
const gitTagSchema = z.object({
	id: uuidSchema,
	message: z.string(),
	author: gitSignatureSchema,
	datetime: z.string().datetime()
});
// A single commit, carrying its structured message and optional tag
const gitCommitSchema = z.object({
	hash: z.string(),
	message: gitMessageSchema,
	author: gitSignatureSchema,
	datetime: z.string().datetime(),
	tag: gitTagSchema.nullable()
});
// Options objects for the individual git operations
const gitInitOptionsSchema = z.object({ initialBranch: z.string() });
const gitCloneOptionsSchema = z.object({
	depth: z.number(),
	singleBranch: z.boolean(),
	branch: z.string(),
	bare: z.boolean()
});
const gitMergeOptionsSchema = z.object({ squash: z.boolean() });
const gitSwitchOptionsSchema = z.object({ isNew: z.boolean().optional() });
const gitLogOptionsSchema = z.object({
	limit: z.number().optional(),
	between: z.object({
		from: z.string(),
		to: z.string().optional()
	}),
	filePath: z.string().optional()
});
// Props for tag CRUD; `path` is a filesystem path to the repository
const createGitTagSchema = gitTagSchema.pick({ message: true }).extend({
	path: z.string(),
	// defaults to tagging the current HEAD when omitted — TODO confirm in GitService
	hash: z.string().optional()
});
const readGitTagSchema = z.object({
	path: z.string(),
	id: uuidSchema.readonly()
});
const deleteGitTagSchema = readGitTagSchema.extend({});
const countGitTagsSchema = z.object({ path: z.string() });
|
|
602
|
+
|
|
603
|
+
//#endregion
|
|
604
|
+
//#region src/schema/assetSchema.ts
// On-disk representation of an Asset; file-derived metadata is readonly
const assetFileSchema = baseFileSchema.extend({
	objectType: z.literal(objectTypeSchema.enum.asset).readonly(),
	name: z.string(),
	description: z.string(),
	extension: z.string().readonly(),
	mimeType: z.string().readonly(),
	size: z.number().readonly()
});
// Runtime representation: adds the resolved absolute path and git history
const assetSchema = assetFileSchema.extend({
	absolutePath: z.string().readonly(),
	history: z.array(gitCommitSchema)
}).openapi("Asset");
const assetExportSchema = assetSchema.extend({});
// Props for the Asset CRUD operations
const createAssetSchema = assetFileSchema.pick({
	name: true,
	description: true
}).extend({
	projectId: uuidSchema.readonly(),
	// path to the source file to import as the Asset
	filePath: z.string().readonly()
});
const readAssetSchema = assetFileSchema.pick({ id: true }).extend({
	projectId: uuidSchema.readonly(),
	// read a historic version when given — TODO confirm semantics in AssetService
	commitHash: z.string().optional().readonly()
});
const saveAssetSchema = assetFileSchema.pick({ id: true }).extend({
	projectId: uuidSchema.readonly(),
	filePath: z.string().readonly(),
	commitHash: z.string().optional().readonly()
});
const updateAssetSchema = assetFileSchema.pick({
	id: true,
	name: true,
	description: true
}).extend({
	projectId: uuidSchema.readonly(),
	// when given, replaces the Asset's file content
	newFilePath: z.string().readonly().optional()
});
const deleteAssetSchema = assetFileSchema.pick({
	id: true,
	extension: true
}).extend({ projectId: uuidSchema.readonly() });
const countAssetsSchema = z.object({ projectId: uuidSchema.readonly() });
|
|
647
|
+
|
|
648
|
+
//#endregion
|
|
649
|
+
//#region src/schema/valueSchema.ts
|
|
650
|
+
const ValueTypeSchema = z.enum([
|
|
651
|
+
"string",
|
|
652
|
+
"number",
|
|
653
|
+
"boolean",
|
|
654
|
+
"reference"
|
|
655
|
+
]);
|
|
656
|
+
const valueContentReferenceBase = z.object({ id: uuidSchema });
|
|
657
|
+
const valueContentReferenceToAssetSchema = valueContentReferenceBase.extend({ objectType: z.literal(objectTypeSchema.enum.asset) });
|
|
658
|
+
const valueContentReferenceToCollectionSchema = valueContentReferenceBase.extend({ objectType: z.literal(objectTypeSchema.enum.collection) });
|
|
659
|
+
const valueContentReferenceToEntrySchema = valueContentReferenceBase.extend({ objectType: z.literal(objectTypeSchema.enum.entry) });
|
|
660
|
+
const valueContentReferenceSchema = z.union([
|
|
661
|
+
valueContentReferenceToAssetSchema,
|
|
662
|
+
valueContentReferenceToCollectionSchema,
|
|
663
|
+
valueContentReferenceToEntrySchema
|
|
664
|
+
]);
|
|
665
|
+
const directValueBaseSchema = z.object({
|
|
666
|
+
objectType: z.literal(objectTypeSchema.enum.value).readonly(),
|
|
667
|
+
fieldDefinitionId: uuidSchema.readonly()
|
|
668
|
+
});
|
|
669
|
+
const directStringValueSchema = directValueBaseSchema.extend({
|
|
670
|
+
valueType: z.literal(ValueTypeSchema.enum.string).readonly(),
|
|
671
|
+
content: translatableStringSchema
|
|
672
|
+
});
|
|
673
|
+
const directNumberValueSchema = directValueBaseSchema.extend({
|
|
674
|
+
valueType: z.literal(ValueTypeSchema.enum.number).readonly(),
|
|
675
|
+
content: translatableNumberSchema
|
|
676
|
+
});
|
|
677
|
+
const directBooleanValueSchema = directValueBaseSchema.extend({
|
|
678
|
+
valueType: z.literal(ValueTypeSchema.enum.boolean).readonly(),
|
|
679
|
+
content: translatableBooleanSchema
|
|
680
|
+
});
|
|
681
|
+
const directValueSchema = z.union([
|
|
682
|
+
directStringValueSchema,
|
|
683
|
+
directNumberValueSchema,
|
|
684
|
+
directBooleanValueSchema
|
|
685
|
+
]);
|
|
686
|
+
const referencedValueSchema = z.object({
|
|
687
|
+
objectType: z.literal(objectTypeSchema.enum.value).readonly(),
|
|
688
|
+
fieldDefinitionId: uuidSchema.readonly(),
|
|
689
|
+
valueType: z.literal(ValueTypeSchema.enum.reference).readonly(),
|
|
690
|
+
content: translatableArrayOf(valueContentReferenceSchema)
|
|
691
|
+
});
|
|
692
|
+
const valueSchema = z.union([directValueSchema, referencedValueSchema]);
|
|
693
|
+
/**
|
|
694
|
+
* ---
|
|
695
|
+
*/
|
|
696
|
+
/**
|
|
697
|
+
* @todo maybe we need to validate Values and shared Values
|
|
698
|
+
*/
|
|
699
|
+
|
|
700
|
+
//#endregion
|
|
701
|
+
//#region src/schema/entrySchema.ts
// On-disk representation of an Entry: base file fields plus its Values
const entryFileSchema = baseFileSchema.extend({
	objectType: z.literal(objectTypeSchema.enum.entry).readonly(),
	values: z.array(valueSchema)
});
// Runtime representation: adds git history
const entrySchema = entryFileSchema.extend({ history: z.array(gitCommitSchema) }).openapi("Entry");
const entryExportSchema = entrySchema.extend({});
// Props for the Entry CRUD operations
const createEntrySchema = entryFileSchema.omit({
	id: true,
	objectType: true,
	created: true,
	updated: true
}).extend({
	projectId: uuidSchema.readonly(),
	collectionId: uuidSchema.readonly(),
	values: z.array(valueSchema)
});
const readEntrySchema = z.object({
	id: uuidSchema.readonly(),
	projectId: uuidSchema.readonly(),
	collectionId: uuidSchema.readonly(),
	// read a historic version when given — TODO confirm semantics in EntryService
	commitHash: z.string().optional().readonly()
});
const updateEntrySchema = entryFileSchema.omit({
	objectType: true,
	created: true,
	updated: true
}).extend({
	projectId: uuidSchema.readonly(),
	collectionId: uuidSchema.readonly()
});
const deleteEntrySchema = readEntrySchema.extend({});
const countEntriesSchema = z.object({
	projectId: uuidSchema.readonly(),
	collectionId: uuidSchema.readonly()
});
|
|
737
|
+
|
|
738
|
+
//#endregion
|
|
739
|
+
//#region src/schema/fieldSchema.ts
// Input types a Field can be rendered as
const FieldTypeSchema = z.enum([
	"text",
	"textarea",
	"email",
	"url",
	"ipv4",
	"date",
	"time",
	"datetime",
	"telephone",
	"number",
	"range",
	"toggle",
	"asset",
	"entry"
]);
// Width of the input; values look like 12-column grid fractions — TODO confirm
const FieldWidthSchema = z.enum([
	"12",
	"6",
	"4",
	"3"
]);
// Properties shared by every Field definition
const FieldDefinitionBaseSchema = z.object({
	id: uuidSchema.readonly(),
	label: translatableStringSchema,
	description: translatableStringSchema.nullable(),
	isRequired: z.boolean(),
	isDisabled: z.boolean(),
	isUnique: z.boolean(),
	inputWidth: FieldWidthSchema
});
/**
 * String based Field definitions
 *
 * Each concrete definition narrows the defaultValue (and may add
 * constraints like min/max) for its fieldType.
 */
const StringFieldDefinitionBaseSchema = FieldDefinitionBaseSchema.extend({
	valueType: z.literal(ValueTypeSchema.enum.string),
	defaultValue: z.string().nullable()
});
const textFieldDefinitionSchema = StringFieldDefinitionBaseSchema.extend({
	fieldType: z.literal(FieldTypeSchema.enum.text),
	min: z.number().nullable(),
	max: z.number().nullable()
});
const textareaFieldDefinitionSchema = StringFieldDefinitionBaseSchema.extend({
	fieldType: z.literal(FieldTypeSchema.enum.textarea),
	min: z.number().nullable(),
	max: z.number().nullable()
});
const emailFieldDefinitionSchema = StringFieldDefinitionBaseSchema.extend({
	fieldType: z.literal(FieldTypeSchema.enum.email),
	defaultValue: z.email().nullable()
});
const urlFieldDefinitionSchema = StringFieldDefinitionBaseSchema.extend({
	fieldType: z.literal(FieldTypeSchema.enum.url),
	defaultValue: z.url().nullable()
});
const ipv4FieldDefinitionSchema = StringFieldDefinitionBaseSchema.extend({
	fieldType: z.literal(FieldTypeSchema.enum.ipv4),
	defaultValue: z.ipv4().nullable()
});
const dateFieldDefinitionSchema = StringFieldDefinitionBaseSchema.extend({
	fieldType: z.literal(FieldTypeSchema.enum.date),
	defaultValue: z.iso.date().nullable()
});
|
|
804
|
+
const timeFieldDefinitionSchema = StringFieldDefinitionBaseSchema.extend({
|
|
805
|
+
fieldType: z.literal(FieldTypeSchema.enum.time),
|
|
806
|
+
defaultValue: z.iso.time().nullable()
|
|
807
|
+
});
|
|
808
|
+
const datetimeFieldDefinitionSchema = StringFieldDefinitionBaseSchema.extend({
|
|
809
|
+
fieldType: z.literal(FieldTypeSchema.enum.datetime),
|
|
810
|
+
defaultValue: z.iso.datetime().nullable()
|
|
811
|
+
});
|
|
812
|
+
const telephoneFieldDefinitionSchema = StringFieldDefinitionBaseSchema.extend({
|
|
813
|
+
fieldType: z.literal(FieldTypeSchema.enum.telephone),
|
|
814
|
+
defaultValue: z.e164().nullable()
|
|
815
|
+
});
|
|
816
|
+
const stringFieldDefinitionSchema = z.union([
|
|
817
|
+
textFieldDefinitionSchema,
|
|
818
|
+
textareaFieldDefinitionSchema,
|
|
819
|
+
emailFieldDefinitionSchema,
|
|
820
|
+
urlFieldDefinitionSchema,
|
|
821
|
+
ipv4FieldDefinitionSchema,
|
|
822
|
+
dateFieldDefinitionSchema,
|
|
823
|
+
timeFieldDefinitionSchema,
|
|
824
|
+
datetimeFieldDefinitionSchema,
|
|
825
|
+
telephoneFieldDefinitionSchema
|
|
826
|
+
]);
|
|
827
|
+
/**
|
|
828
|
+
* Number based Field definitions
|
|
829
|
+
*/
|
|
830
|
+
const NumberFieldDefinitionBaseSchema = FieldDefinitionBaseSchema.extend({
|
|
831
|
+
valueType: z.literal(ValueTypeSchema.enum.number),
|
|
832
|
+
min: z.number().nullable(),
|
|
833
|
+
max: z.number().nullable(),
|
|
834
|
+
isUnique: z.literal(false),
|
|
835
|
+
defaultValue: z.number().nullable()
|
|
836
|
+
});
|
|
837
|
+
const numberFieldDefinitionSchema = NumberFieldDefinitionBaseSchema.extend({ fieldType: z.literal(FieldTypeSchema.enum.number) });
|
|
838
|
+
const rangeFieldDefinitionSchema = NumberFieldDefinitionBaseSchema.extend({
|
|
839
|
+
fieldType: z.literal(FieldTypeSchema.enum.range),
|
|
840
|
+
isRequired: z.literal(true),
|
|
841
|
+
min: z.number(),
|
|
842
|
+
max: z.number(),
|
|
843
|
+
defaultValue: z.number()
|
|
844
|
+
});
|
|
845
|
+
/**
|
|
846
|
+
* Boolean based Field definitions
|
|
847
|
+
*/
|
|
848
|
+
const BooleanFieldDefinitionBaseSchema = FieldDefinitionBaseSchema.extend({
|
|
849
|
+
valueType: z.literal(ValueTypeSchema.enum.boolean),
|
|
850
|
+
isRequired: z.literal(true),
|
|
851
|
+
defaultValue: z.boolean(),
|
|
852
|
+
isUnique: z.literal(false)
|
|
853
|
+
});
|
|
854
|
+
const toggleFieldDefinitionSchema = BooleanFieldDefinitionBaseSchema.extend({ fieldType: z.literal(FieldTypeSchema.enum.toggle) });
|
|
855
|
+
/**
|
|
856
|
+
* Reference based Field definitions
|
|
857
|
+
*/
|
|
858
|
+
const ReferenceFieldDefinitionBaseSchema = FieldDefinitionBaseSchema.extend({ valueType: z.literal(ValueTypeSchema.enum.reference) });
|
|
859
|
+
const assetFieldDefinitionSchema = ReferenceFieldDefinitionBaseSchema.extend({
|
|
860
|
+
fieldType: z.literal(FieldTypeSchema.enum.asset),
|
|
861
|
+
min: z.number().nullable(),
|
|
862
|
+
max: z.number().nullable()
|
|
863
|
+
});
|
|
864
|
+
const entryFieldDefinitionSchema = ReferenceFieldDefinitionBaseSchema.extend({
|
|
865
|
+
fieldType: z.literal(FieldTypeSchema.enum.entry),
|
|
866
|
+
ofCollections: z.array(uuidSchema),
|
|
867
|
+
min: z.number().nullable(),
|
|
868
|
+
max: z.number().nullable()
|
|
869
|
+
});
|
|
870
|
+
const fieldDefinitionSchema = z.union([
|
|
871
|
+
stringFieldDefinitionSchema,
|
|
872
|
+
numberFieldDefinitionSchema,
|
|
873
|
+
rangeFieldDefinitionSchema,
|
|
874
|
+
toggleFieldDefinitionSchema,
|
|
875
|
+
assetFieldDefinitionSchema,
|
|
876
|
+
entryFieldDefinitionSchema
|
|
877
|
+
]);
|
|
878
|
+
|
|
879
|
+
//#endregion
|
|
880
|
+
//#region src/schema/collectionSchema.ts
|
|
881
|
+
// Collection as stored in its file: naming, slugs, icon and the Field
// definitions its Entries must conform to.
const collectionFileSchema = baseFileSchema.extend({
	objectType: z.literal(objectTypeSchema.enum.collection).readonly(),
	name: z.object({
		singular: translatableStringSchema,
		plural: translatableStringSchema
	}),
	slug: z.object({
		singular: z.string(),
		plural: z.string()
	}),
	description: translatableStringSchema,
	icon: supportedIconSchema,
	fieldDefinitions: z.array(fieldDefinitionSchema)
});
// API shape: file fields plus Git commit history.
const collectionSchema = collectionFileSchema.extend({ history: z.array(gitCommitSchema) }).openapi("Collection");
// Export bundles the Collection together with all of its exported Entries.
const collectionExportSchema = collectionSchema.extend({ entries: z.array(entryExportSchema) });
// Creation input: server-managed fields omitted, target Project added.
const createCollectionSchema = collectionFileSchema.omit({
	id: true,
	objectType: true,
	created: true,
	updated: true
}).extend({ projectId: uuidSchema.readonly() });
// Read input; `commitHash` optionally selects a historic revision.
const readCollectionSchema = z.object({
	id: uuidSchema.readonly(),
	projectId: uuidSchema.readonly(),
	commitHash: z.string().optional().readonly()
});
// Update input: only the user-editable subset plus the owning Project.
const updateCollectionSchema = collectionFileSchema.pick({
	id: true,
	name: true,
	slug: true,
	description: true,
	icon: true,
	fieldDefinitions: true
}).extend({ projectId: uuidSchema.readonly() });
// Deleting takes the same identifiers as reading.
const deleteCollectionSchema = readCollectionSchema.extend({});
const countCollectionsSchema = z.object({ projectId: uuidSchema.readonly() });
|
|
918
|
+
|
|
919
|
+
//#endregion
|
|
920
|
+
//#region src/schema/coreSchema.ts
|
|
921
|
+
/**
* Options that can be passed to elek.io core
*
* - log.level: minimum log level to emit (see logLevelSchema)
* - file.cache: whether file contents are cached
*/
const elekIoCoreOptionsSchema = z.object({
	log: z.object({ level: logLevelSchema }),
	file: z.object({ cache: z.boolean() })
});
// Constructor input: both option groups are optional, as is the whole object.
const constructorElekIoCoreSchema = elekIoCoreOptionsSchema.partial({
	log: true,
	file: true
}).optional();
|
|
932
|
+
|
|
933
|
+
//#endregion
|
|
934
|
+
//#region src/schema/projectSchema.ts
|
|
935
|
+
// NOTE(review): these status values look like placeholders ("foo", "bar",
// "todo") — confirm the intended Project states before relying on them.
const projectStatusSchema = z.enum([
	"foo",
	"bar",
	"todo"
]);
// Per-Project language configuration: one default plus all supported languages.
const projectSettingsSchema = z.object({ language: z.object({
	default: supportedLanguageSchema,
	supported: z.array(supportedLanguageSchema)
}) });
// Well-known folders inside a Project repository.
const projectFolderSchema = z.enum([
	"assets",
	"collections",
	"shared-values",
	"lfs"
]);
// Git branches a Project works with.
const projectBranchSchema = z.enum(["production", "work"]);
// Project as stored in its file: versions, status and settings on top of the base file fields.
const projectFileSchema = baseFileSchema.extend({
	objectType: z.literal(objectTypeSchema.enum.project).readonly(),
	coreVersion: versionSchema,
	name: z.string().trim().min(1, "shared.projectNameRequired"),
	description: z.string().trim().min(1, "shared.projectDescriptionRequired"),
	version: versionSchema,
	status: projectStatusSchema,
	settings: projectSettingsSchema
});
// API shape: adds Git remote and the two history views.
const projectSchema = projectFileSchema.extend({
	remoteOriginUrl: z.string().nullable().openapi({ description: "URL of the remote Git repository" }),
	history: z.array(gitCommitSchema).openapi({ description: "Commit history of this Project" }),
	fullHistory: z.array(gitCommitSchema).openapi({ description: "Full commit history of this Project including all Assets, Collections, Entries and other files" })
}).openapi("Project");
// Minimal shape for a Project created by an older core version.
const outdatedProjectSchema = projectFileSchema.pick({
	id: true,
	name: true,
	coreVersion: true
});
// Export bundles the Project with all of its exported Assets and Collections.
const projectExportSchema = projectSchema.extend({
	assets: z.array(assetExportSchema),
	collections: z.array(collectionExportSchema)
});
// Creation input: only a name is required; description and settings are optional.
const createProjectSchema = projectSchema.pick({
	name: true,
	description: true,
	settings: true
}).partial({
	description: true,
	settings: true
});
// Read input; `commitHash` optionally selects a historic revision.
const readProjectSchema = z.object({
	id: uuidSchema.readonly(),
	commitHash: z.string().optional().readonly()
});
// Update input: `id` required, every editable field optional.
const updateProjectSchema = projectSchema.pick({
	id: true,
	name: true,
	description: true,
	settings: true
}).partial({
	name: true,
	description: true,
	settings: true
});
// Upgrade a Project to the current core version; `force` skips safety checks (presumably — verify in ProjectService).
const upgradeProjectSchema = z.object({
	id: uuidSchema.readonly(),
	force: z.boolean().optional()
});
const deleteProjectSchema = readProjectSchema.extend({ force: z.boolean().optional() });
// One migration step: the target version and an async function applied to the Project file.
const projectUpgradeSchema = z.object({
	to: versionSchema.readonly(),
	run: z.function({
		input: [projectFileSchema],
		output: z.promise(z.void())
	})
});
const cloneProjectSchema = z.object({ url: z.string() });
const listBranchesProjectSchema = z.object({ id: uuidSchema.readonly() });
const currentBranchProjectSchema = z.object({ id: uuidSchema.readonly() });
const switchBranchProjectSchema = z.object({
	id: uuidSchema.readonly(),
	branch: z.string(),
	options: gitSwitchOptionsSchema.optional()
});
const getRemoteOriginUrlProjectSchema = z.object({ id: uuidSchema.readonly() });
const setRemoteOriginUrlProjectSchema = z.object({
	id: uuidSchema.readonly(),
	url: z.string()
});
const getChangesProjectSchema = z.object({ id: uuidSchema.readonly() });
const synchronizeProjectSchema = z.object({ id: uuidSchema.readonly() });
// Full-text search input; `type` optionally restricts the object types searched.
const searchProjectSchema = z.object({
	id: uuidSchema.readonly(),
	query: z.string(),
	language: supportedLanguageSchema,
	type: z.array(objectTypeSchema).optional()
});
|
|
1029
|
+
|
|
1030
|
+
//#endregion
|
|
1031
|
+
//#region src/schema/schemaFromFieldDefinition.ts
|
|
1032
|
+
/**
* Builds the content schema for boolean Values.
*
* Boolean Values are always either true or false, so no Field definition
* input is needed here.
*/
function getBooleanValueContentSchemaFromFieldDefinition() {
	const booleanContentSchema = z.boolean();
	return booleanContentSchema;
}
|
|
1038
|
+
/**
* Number Values can have min and max values and can be required or not.
*
* @param fieldDefinition - a number based Field definition providing `min`,
*   `max` and `isRequired`
* @returns a zod number schema; nullable when the Field is not required
*/
function getNumberValueContentSchemaFromFieldDefinition(fieldDefinition) {
	let schema = z.number();
	// Use `!= null` instead of truthiness: a configured bound of 0 is valid
	// and was silently dropped by the previous `if (fieldDefinition.min)`
	// check (e.g. `min: 0` no longer rejected negative numbers).
	if (fieldDefinition.min != null) schema = schema.min(fieldDefinition.min);
	if (fieldDefinition.max != null) schema = schema.max(fieldDefinition.max);
	if (fieldDefinition.isRequired === false) return schema.nullable();
	return schema;
}
|
|
1048
|
+
/**
* String Values can have different formats (email, url, ipv4, date, time, ...)
* and can have min and max length and can be required or not.
*
* @param fieldDefinition - a string based Field definition
* @returns a zod string schema matching the Field's format and bounds;
*   nullable when the Field is not required, otherwise non-empty
* @throws Error when the Field definition's fieldType is not string based
*/
function getStringValueContentSchemaFromFieldDefinition(fieldDefinition) {
	let schema = null;
	switch (fieldDefinition.fieldType) {
		case FieldTypeSchema.enum.email:
			schema = z.email();
			break;
		case FieldTypeSchema.enum.url:
			schema = z.url();
			break;
		case FieldTypeSchema.enum.ipv4:
			schema = z.ipv4();
			break;
		case FieldTypeSchema.enum.date:
			schema = z.iso.date();
			break;
		case FieldTypeSchema.enum.time:
			schema = z.iso.time();
			break;
		case FieldTypeSchema.enum.datetime:
			schema = z.iso.datetime();
			break;
		case FieldTypeSchema.enum.telephone:
			schema = z.e164();
			break;
		case FieldTypeSchema.enum.text:
		case FieldTypeSchema.enum.textarea:
			schema = z.string().trim();
			break;
	}
	// Fail loudly on an unhandled fieldType: previously `schema` stayed null
	// and the `.min()` calls below crashed with an unhelpful TypeError.
	if (schema === null) throw new Error(`Error generating schema for unsupported FieldType "${fieldDefinition.fieldType}"`);
	// `!= null` instead of truthiness so a bound of 0 is applied, and it also
	// covers Field definitions without min/max (email, url, ... have none).
	if (fieldDefinition.min != null) schema = schema.min(fieldDefinition.min);
	if (fieldDefinition.max != null) schema = schema.max(fieldDefinition.max);
	if (fieldDefinition.isRequired === false) return schema.nullable();
	// Required strings must be non-empty.
	return schema.min(1, "shared.stringValueRequired");
}
|
|
1086
|
+
/**
* Reference Values can reference either Assets or Entries (or Shared Values in the future)
* and can have min and max number of references and can be required or not.
*
* @param fieldDefinition - a reference based Field definition (asset or entry)
* @returns a zod array schema of references with the configured bounds
* @throws Error when the Field definition's fieldType is not reference based
*/
function getReferenceValueContentSchemaFromFieldDefinition(fieldDefinition) {
	let schema;
	switch (fieldDefinition.fieldType) {
		case FieldTypeSchema.enum.asset:
			schema = z.array(valueContentReferenceToAssetSchema);
			break;
		case FieldTypeSchema.enum.entry:
			schema = z.array(valueContentReferenceToEntrySchema);
			break;
		// Fail loudly on an unhandled fieldType: previously `schema` stayed
		// undefined and the `.min()` calls below crashed with a TypeError.
		default: throw new Error(`Error generating schema for unsupported FieldType "${fieldDefinition.fieldType}"`);
	}
	if (fieldDefinition.isRequired) schema = schema.min(1, "shared.referenceRequired");
	// `!= null` instead of truthiness so a configured bound of 0 is applied
	// (e.g. `max: 0` was silently ignored by the previous check).
	if (fieldDefinition.min != null) schema = schema.min(fieldDefinition.min);
	if (fieldDefinition.max != null) schema = schema.max(fieldDefinition.max);
	return schema;
}
|
|
1105
|
+
/**
* Wraps the string content schema in a per-language partial record,
* so each supported language may carry its own string content.
*/
function getTranslatableStringValueContentSchemaFromFieldDefinition(fieldDefinition) {
	const contentSchema = getStringValueContentSchemaFromFieldDefinition(fieldDefinition);
	return z.partialRecord(supportedLanguageSchema, contentSchema);
}
|
|
1108
|
+
/**
* Wraps the number content schema in a per-language partial record,
* so each supported language may carry its own number content.
*/
function getTranslatableNumberValueContentSchemaFromFieldDefinition(fieldDefinition) {
	const contentSchema = getNumberValueContentSchemaFromFieldDefinition(fieldDefinition);
	return z.partialRecord(supportedLanguageSchema, contentSchema);
}
|
|
1111
|
+
/**
* Wraps the boolean content schema in a per-language partial record,
* so each supported language may carry its own boolean content.
*/
function getTranslatableBooleanValueContentSchemaFromFieldDefinition() {
	const contentSchema = getBooleanValueContentSchemaFromFieldDefinition();
	return z.partialRecord(supportedLanguageSchema, contentSchema);
}
|
|
1114
|
+
/**
* Wraps the reference content schema in a per-language partial record,
* so each supported language may carry its own references.
*/
function getTranslatableReferenceValueContentSchemaFromFieldDefinition(fieldDefinition) {
	const contentSchema = getReferenceValueContentSchemaFromFieldDefinition(fieldDefinition);
	return z.partialRecord(supportedLanguageSchema, contentSchema);
}
|
|
1117
|
+
/**
* Generates a zod schema to check a Value based on given Field definition.
*
* Dispatches on the definition's valueType and extends the matching base
* Value schema with the translatable content schema for that type.
*/
function getValueSchemaFromFieldDefinition(fieldDefinition) {
	const valueType = fieldDefinition.valueType;
	if (valueType === ValueTypeSchema.enum.boolean) {
		return directBooleanValueSchema.extend({ content: getTranslatableBooleanValueContentSchemaFromFieldDefinition() });
	}
	if (valueType === ValueTypeSchema.enum.number) {
		return directNumberValueSchema.extend({ content: getTranslatableNumberValueContentSchemaFromFieldDefinition(fieldDefinition) });
	}
	if (valueType === ValueTypeSchema.enum.string) {
		return directStringValueSchema.extend({ content: getTranslatableStringValueContentSchemaFromFieldDefinition(fieldDefinition) });
	}
	if (valueType === ValueTypeSchema.enum.reference) {
		return referencedValueSchema.extend({ content: getTranslatableReferenceValueContentSchemaFromFieldDefinition(fieldDefinition) });
	}
	throw new Error(`Error generating schema for unsupported ValueType "${fieldDefinition.valueType}"`);
}
|
|
1129
|
+
/**
* Generates a schema for creating a new Entry based on the given Field
* definitions and Values: the static createEntrySchema shape with `values`
* narrowed to a tuple of one Value schema per Field definition (in order).
*/
function getCreateEntrySchemaFromFieldDefinitions(fieldDefinitions) {
	const perFieldValueSchemas = [];
	for (const definition of fieldDefinitions) {
		perFieldValueSchemas.push(getValueSchemaFromFieldDefinition(definition));
	}
	return z.object({
		...createEntrySchema.shape,
		values: z.tuple(perFieldValueSchemas)
	});
}
|
|
1141
|
+
/**
* Generates a schema for updating an existing Entry based on the given Field
* definitions and Values: the static updateEntrySchema shape with `values`
* narrowed to a tuple of one Value schema per Field definition (in order).
*/
function getUpdateEntrySchemaFromFieldDefinitions(fieldDefinitions) {
	const perFieldValueSchemas = [];
	for (const definition of fieldDefinitions) {
		perFieldValueSchemas.push(getValueSchemaFromFieldDefinition(definition));
	}
	return z.object({
		...updateEntrySchema.shape,
		values: z.tuple(perFieldValueSchemas)
	});
}
|
|
1153
|
+
|
|
1154
|
+
//#endregion
|
|
1155
|
+
//#region src/schema/serviceSchema.ts
|
|
1156
|
+
// Identifiers for the internal services of this core package.
const serviceTypeSchema = z.enum([
	"Git",
	"GitTag",
	"User",
	"Project",
	"Asset",
	"JsonFile",
	"Search",
	"Collection",
	"Entry",
	"Value"
]);
|
|
1168
|
+
/**
* Wraps the given item schema in the standard paginated list envelope:
* total / limit / offset counters plus the page of results in `list`.
*/
function paginatedListOf(schema) {
	const paginationShape = {
		total: z.number(),
		limit: z.number(),
		offset: z.number(),
		list: z.array(schema)
	};
	return z.object(paginationShape);
}
|
|
1176
|
+
// Common input for list operations: owning Project plus optional pagination.
const listSchema = z.object({
	projectId: uuidSchema,
	limit: z.number().optional(),
	offset: z.number().optional()
});
const listCollectionsSchema = listSchema;
// Entries are additionally scoped to one Collection.
const listEntriesSchema = listSchema.extend({ collectionId: uuidSchema });
const listAssetsSchema = listSchema;
// Projects are top level, so there is no `projectId` to scope by.
const listProjectsSchema = listSchema.omit({ projectId: true });
// Git tags are listed by repository path instead of Project id.
const listGitTagsSchema = z.object({ path: z.string() });
|
|
1186
|
+
|
|
1187
|
+
//#endregion
|
|
1188
|
+
//#region src/schema/userSchema.ts
|
|
1189
|
+
// A User is either purely local or connected to the elek.io cloud.
const UserTypeSchema = z.enum(["local", "cloud"]);
// Shared User fields on top of the Git signature (name/email — see gitSignatureSchema).
const baseUserSchema = gitSignatureSchema.extend({
	userType: UserTypeSchema,
	language: supportedLanguageSchema,
	// Configuration of the local HTTP API served by this core.
	localApi: z.object({
		isEnabled: z.boolean(),
		port: z.number()
	})
});
const localUserSchema = baseUserSchema.extend({ userType: z.literal(UserTypeSchema.enum.local) });
// Cloud Users additionally carry their cloud account id.
const cloudUserSchema = baseUserSchema.extend({
	userType: z.literal(UserTypeSchema.enum.cloud),
	id: uuidSchema
});
const userFileSchema = z.union([localUserSchema, cloudUserSchema]);
// API and input shapes are currently identical to the file shape.
const userSchema = userFileSchema;
const setUserSchema = userSchema;
|
|
1206
|
+
|
|
1207
|
+
//#endregion
|
|
1208
|
+
//#region src/schema/cliSchema.ts
|
|
1209
|
+
// CLI option schemas (these use the plain `zod` import, aliased z$1, instead
// of the OpenAPI-extended one used elsewhere in this bundle).
const outDirSchema = z$1.string().default("./.elek.io");
// Output language of the generated API client.
const languageSchema = z$1.enum(["ts", "js"]).default("ts");
// Module format of the generated API client.
const formatSchema = z$1.enum(["esm", "cjs"]).default("esm");
// Compilation target of the generated API client.
const targetSchema = z$1.enum([
	"es3",
	"es5",
	"es6",
	"es2015",
	"es2016",
	"es2017",
	"es2018",
	"es2019",
	"es2020",
	"es2021",
	"es2022",
	"es2023",
	"es2024",
	"esnext"
]).default("es2020");
// Either the literal "all" or a comma-separated list of Project UUIDs,
// parsed into an array of validated ids.
const projectsSchema = z$1.string().default("all").transform((value) => {
	if (value === "all") return "all";
	return value.split(",").map((v) => uuidSchema.parse(v.trim()));
});
const generateApiClientOptionsSchema = z$1.object({ watch: z$1.boolean().default(false) });
const exportProjectsOptionsSchema = generateApiClientOptionsSchema.extend({ separate: z$1.boolean().default(false) });
// Full input of the `generate api client` command.
const generateApiClientSchema = z$1.object({
	outDir: outDirSchema,
	language: languageSchema,
	format: formatSchema,
	target: targetSchema,
	options: generateApiClientOptionsSchema
});
|
|
1241
|
+
// Port of the local API as a string CLI argument, parsed to a number.
const portSchema = z$1.string().default("31310").transform((value, context) => {
	// Number.parseInt never throws — it returns NaN on unparsable input, so
	// the previous try/catch was dead code and an invalid port leaked through
	// as NaN. Validate explicitly instead (and always pass the radix).
	const port = Number.parseInt(value, 10);
	if (Number.isNaN(port)) {
		context.addIssue({
			code: "custom",
			message: "Invalid port number",
			input: value
		});
		return z$1.NEVER;
	}
	return port;
});
|
|
1253
|
+
// Input of the `api start` command.
const apiStartSchema = z$1.object({ port: portSchema });
// Input of the `export` command: output directory, Project selection and flags.
const exportSchema = z$1.object({
	outDir: outDirSchema,
	projects: projectsSchema,
	options: exportProjectsOptionsSchema
});
|
|
1259
|
+
|
|
1260
|
+
//#endregion
|
|
1261
|
+
//#region src/schema/logSchema.ts
|
|
1262
|
+
// Which part of the application emitted the log line.
const logSourceSchema = z.enum(["core", "desktop"]);
// A single log entry: source, message and optional structured metadata.
const logSchema = z.object({
	source: logSourceSchema,
	message: z.string(),
	meta: z.record(z.string(), z.unknown()).optional()
});
// Console transport additionally carries the formatted timestamp and level.
const logConsoleTransportSchema = logSchema.extend({
	timestamp: z.string(),
	level: z.string()
});
|
|
1272
|
+
|
|
1273
|
+
//#endregion
|
|
1274
|
+
//#region src/api/routes/content/v1/projects.ts
|
|
1275
|
+
// Content API v1 routes for Projects: list, count and read (read-only).
const tags$3 = ["Content API v1"];
const router$6 = createRouter().openapi(createRoute({
	summary: "List Projects",
	description: "Lists all Projects you currently have access to",
	method: "get",
	path: "/",
	tags: tags$3,
	request: { query: z.object({
		limit: z.string().pipe(z.coerce.number()).optional().openapi({
			// Documented defaults are strings ("15"/"0") to match the string
			// query schema — previously these were numbers here while the
			// collections/entries routers used strings for identical schemas.
			default: "15",
			description: "The maximum number of Projects to return"
		}),
		offset: z.string().pipe(z.coerce.number()).optional().openapi({
			default: "0",
			description: "The number of Projects to skip before starting to collect the result set"
		})
	}) },
	responses: { [200]: {
		content: { "application/json": { schema: paginatedListOf(projectSchema) } },
		description: "A list of Projects you have access to"
	} }
}), async (c) => {
	const { limit, offset } = c.req.valid("query");
	const projects = await c.var.projectService.list({
		limit,
		offset
	});
	return c.json(projects, 200);
}).openapi(createRoute({
	summary: "Count Projects",
	description: "Counts all Projects you currently have access to",
	method: "get",
	path: "/count",
	tags: tags$3,
	responses: { [200]: {
		content: { "application/json": { schema: z.number() } },
		description: "The number of Projects you have access to"
	} }
}), async (c) => {
	const count = await c.var.projectService.count();
	return c.json(count, 200);
}).openapi(createRoute({
	summary: "Get one Project",
	description: "Retrieve a Project by ID",
	method: "get",
	path: "/{projectId}",
	tags: tags$3,
	request: { params: z.object({ projectId: uuidSchema.openapi({ param: {
		name: "projectId",
		in: "path"
	} }) }) },
	responses: { [200]: {
		content: { "application/json": { schema: projectSchema } },
		description: "The requested Project"
	} }
}), async (c) => {
	const { projectId } = c.req.valid("param");
	const project = await c.var.projectService.read({ id: projectId });
	return c.json(project, 200);
});
var projects_default = router$6;
|
|
1336
|
+
|
|
1337
|
+
//#endregion
|
|
1338
|
+
//#region src/api/routes/content/v1/collections.ts
|
|
1339
|
+
// Content API v1 routes for Collections: list, count and read (read-only),
// all scoped to one Project via the {projectId} path parameter.
const tags$2 = ["Content API v1"];
const router$5 = createRouter().openapi(createRoute({
	summary: "List Collections",
	description: "Lists all Collections of the given Project",
	method: "get",
	path: "/{projectId}/collections",
	tags: tags$2,
	request: {
		params: z.object({ projectId: uuidSchema.openapi({ param: {
			name: "projectId",
			in: "path"
		} }) }),
		// Pagination arrives as query strings and is coerced to numbers.
		query: z.object({
			limit: z.string().pipe(z.coerce.number()).optional().openapi({
				default: "15",
				description: "The maximum number of Collections to return"
			}),
			offset: z.string().pipe(z.coerce.number()).optional().openapi({
				default: "0",
				description: "The number of Collections to skip before starting to collect the result set"
			})
		})
	},
	responses: { [200]: {
		content: { "application/json": { schema: paginatedListOf(collectionSchema) } },
		description: "A list of Collections for the given Project"
	} }
}), async (c) => {
	const { projectId } = c.req.valid("param");
	const { limit, offset } = c.req.valid("query");
	const collections = await c.var.collectionService.list({
		projectId,
		limit,
		offset
	});
	return c.json(collections, 200);
}).openapi(createRoute({
	summary: "Count Collections",
	description: "Counts all Collections of the given Project",
	method: "get",
	path: "/{projectId}/collections/count",
	tags: tags$2,
	request: { params: z.object({ projectId: uuidSchema.openapi({ param: {
		name: "projectId",
		in: "path"
	} }) }) },
	responses: { [200]: {
		content: { "application/json": { schema: z.number() } },
		description: "The number of Collections of the given Project"
	} }
}), async (c) => {
	const { projectId } = c.req.valid("param");
	const count = await c.var.collectionService.count({ projectId });
	return c.json(count, 200);
}).openapi(createRoute({
	summary: "Get one Collection",
	description: "Retrieve a Collection by ID",
	method: "get",
	path: "/{projectId}/collections/{collectionId}",
	tags: tags$2,
	request: { params: z.object({
		projectId: uuidSchema.openapi({ param: {
			name: "projectId",
			in: "path"
		} }),
		collectionId: uuidSchema.openapi({ param: {
			name: "collectionId",
			in: "path"
		} })
	}) },
	responses: { [200]: {
		content: { "application/json": { schema: collectionSchema } },
		description: "The requested Collection"
	} }
}), async (c) => {
	const { projectId, collectionId } = c.req.valid("param");
	const collection = await c.var.collectionService.read({
		projectId,
		id: collectionId
	});
	return c.json(collection, 200);
});
var collections_default = router$5;
|
|
1422
|
+
|
|
1423
|
+
//#endregion
|
|
1424
|
+
//#region src/api/routes/content/v1/entries.ts
|
|
1425
|
+
const tags$1 = ["Content API v1"];
|
|
1426
|
+
const router$4 = createRouter().openapi(createRoute({
|
|
1427
|
+
summary: "List Entries",
|
|
1428
|
+
description: "Lists all Entries of the given Projects Collection",
|
|
1429
|
+
method: "get",
|
|
1430
|
+
path: "/{projectId}/collections/{collectionId}/entries",
|
|
1431
|
+
tags: tags$1,
|
|
1432
|
+
request: {
|
|
1433
|
+
params: z.object({
|
|
1434
|
+
projectId: uuidSchema.openapi({ param: {
|
|
1435
|
+
name: "projectId",
|
|
1436
|
+
in: "path"
|
|
1437
|
+
} }),
|
|
1438
|
+
collectionId: uuidSchema.openapi({ param: {
|
|
1439
|
+
name: "collectionId",
|
|
1440
|
+
in: "path"
|
|
1441
|
+
} })
|
|
1442
|
+
}),
|
|
1443
|
+
query: z.object({
|
|
1444
|
+
limit: z.string().pipe(z.coerce.number()).optional().openapi({
|
|
1445
|
+
default: "15",
|
|
1446
|
+
description: "The maximum number of Entries to return"
|
|
1447
|
+
}),
|
|
1448
|
+
offset: z.string().pipe(z.coerce.number()).optional().openapi({
|
|
1449
|
+
default: "0",
|
|
1450
|
+
description: "The number of Entries to skip before starting to collect the result set"
|
|
1451
|
+
})
|
|
1452
|
+
})
|
|
1453
|
+
},
|
|
1454
|
+
responses: { [200]: {
|
|
1455
|
+
content: { "application/json": { schema: paginatedListOf(entrySchema) } },
|
|
1456
|
+
description: "A list of Entries for the given Projects Collection"
|
|
1457
|
+
} }
|
|
1458
|
+
}), async (c) => {
|
|
1459
|
+
const { projectId, collectionId } = c.req.valid("param");
|
|
1460
|
+
const { limit, offset } = c.req.valid("query");
|
|
1461
|
+
const entries = await c.var.entryService.list({
|
|
1462
|
+
projectId,
|
|
1463
|
+
collectionId,
|
|
1464
|
+
limit,
|
|
1465
|
+
offset
|
|
1466
|
+
});
|
|
1467
|
+
return c.json(entries, 200);
|
|
1468
|
+
}).openapi(createRoute({
|
|
1469
|
+
summary: "Count Entries",
|
|
1470
|
+
description: "Counts all Entries of the given Projects Collection",
|
|
1471
|
+
method: "get",
|
|
1472
|
+
path: "/{projectId}/collections/{collectionId}/entries/count",
|
|
1473
|
+
tags: tags$1,
|
|
1474
|
+
request: { params: z.object({
|
|
1475
|
+
projectId: uuidSchema.openapi({ param: {
|
|
1476
|
+
name: "projectId",
|
|
1477
|
+
in: "path"
|
|
1478
|
+
} }),
|
|
1479
|
+
collectionId: uuidSchema.openapi({ param: {
|
|
1480
|
+
name: "collectionId",
|
|
1481
|
+
in: "path"
|
|
1482
|
+
} })
|
|
1483
|
+
}) },
|
|
1484
|
+
responses: { [200]: {
|
|
1485
|
+
content: { "application/json": { schema: z.number() } },
|
|
1486
|
+
description: "The number of Entries of the given Projects Collection"
|
|
1487
|
+
} }
|
|
1488
|
+
}), async (c) => {
|
|
1489
|
+
const { projectId, collectionId } = c.req.valid("param");
|
|
1490
|
+
const count = await c.var.entryService.count({
|
|
1491
|
+
projectId,
|
|
1492
|
+
collectionId
|
|
1493
|
+
});
|
|
1494
|
+
return c.json(count, 200);
|
|
1495
|
+
}).openapi(createRoute({
|
|
1496
|
+
summary: "Get one Entry",
|
|
1497
|
+
description: "Retrieve an Entry by ID",
|
|
1498
|
+
method: "get",
|
|
1499
|
+
path: "/{projectId}/collections/{collectionId}/entries/{entryId}",
|
|
1500
|
+
tags: tags$1,
|
|
1501
|
+
request: { params: z.object({
|
|
1502
|
+
projectId: uuidSchema.openapi({ param: {
|
|
1503
|
+
name: "projectId",
|
|
1504
|
+
in: "path"
|
|
1505
|
+
} }),
|
|
1506
|
+
collectionId: uuidSchema.openapi({ param: {
|
|
1507
|
+
name: "collectionId",
|
|
1508
|
+
in: "path"
|
|
1509
|
+
} }),
|
|
1510
|
+
entryId: uuidSchema.openapi({ param: {
|
|
1511
|
+
name: "entryId",
|
|
1512
|
+
in: "path"
|
|
1513
|
+
} })
|
|
1514
|
+
}) },
|
|
1515
|
+
responses: { [200]: {
|
|
1516
|
+
content: { "application/json": { schema: entrySchema } },
|
|
1517
|
+
description: "The requested Entry"
|
|
1518
|
+
} }
|
|
1519
|
+
}), async (c) => {
|
|
1520
|
+
const { projectId, collectionId, entryId } = c.req.valid("param");
|
|
1521
|
+
const entry = await c.var.entryService.read({
|
|
1522
|
+
projectId,
|
|
1523
|
+
collectionId,
|
|
1524
|
+
id: entryId
|
|
1525
|
+
});
|
|
1526
|
+
return c.json(entry, 200);
|
|
1527
|
+
});
|
|
1528
|
+
// Default export of the Entries router built above, mounted under /projects by the v1 index router
var entries_default = router$4;
|
|
1529
|
+
|
|
1530
|
+
//#endregion
|
|
1531
|
+
//#region src/api/routes/content/v1/assets.ts
|
|
1532
|
+
/** OpenAPI tag shared by every Asset route of the v1 content API. */
const tags = ["Content API v1"];
/** Builds a UUID path-parameter schema annotated for the OpenAPI document. */
const assetPathParam = (name) => uuidSchema.openapi({ param: {
	name,
	in: "path"
} });
/** GET /{projectId}/assets — paginated list of a Project's Assets. */
const listAssetsRoute = createRoute({
	summary: "List Assets",
	description: "Lists all Assets of the given Project",
	method: "get",
	path: "/{projectId}/assets",
	tags,
	request: {
		params: z.object({ projectId: assetPathParam("projectId") }),
		query: z.object({
			limit: z.string().pipe(z.coerce.number()).optional().openapi({
				default: "15",
				description: "The maximum number of Assets to return"
			}),
			offset: z.string().pipe(z.coerce.number()).optional().openapi({
				default: "0",
				description: "The number of Assets to skip before starting to collect the result set"
			})
		})
	},
	responses: { [200]: {
		content: { "application/json": { schema: paginatedListOf(assetSchema) } },
		description: "A list of Assets for the given Project"
	} }
});
/** GET /{projectId}/assets/count — total number of a Project's Assets. */
const countAssetsRoute = createRoute({
	summary: "Count Assets",
	description: "Counts all Assets of the given Project",
	method: "get",
	path: "/{projectId}/assets/count",
	tags,
	request: { params: z.object({ projectId: assetPathParam("projectId") }) },
	responses: { [200]: {
		content: { "application/json": { schema: z.number() } },
		description: "The number of Assets of the given Project"
	} }
});
/** GET /{projectId}/assets/{assetId} — a single Asset by ID. */
const readAssetRoute = createRoute({
	summary: "Get one Asset",
	description: "Retrieve an Asset by ID",
	method: "get",
	path: "/{projectId}/assets/{assetId}",
	tags,
	request: { params: z.object({
		projectId: assetPathParam("projectId"),
		assetId: assetPathParam("assetId")
	}) },
	responses: { [200]: {
		content: { "application/json": { schema: assetSchema } },
		description: "The requested Asset"
	} }
});
const router$3 = createRouter().openapi(listAssetsRoute, async (c) => {
	const { projectId } = c.req.valid("param");
	const { limit, offset } = c.req.valid("query");
	const assets = await c.var.assetService.list({
		projectId,
		limit,
		offset
	});
	return c.json(assets, 200);
}).openapi(countAssetsRoute, async (c) => {
	const { projectId } = c.req.valid("param");
	const count = await c.var.assetService.count({ projectId });
	return c.json(count, 200);
}).openapi(readAssetRoute, async (c) => {
	const { projectId, assetId } = c.req.valid("param");
	const asset = await c.var.assetService.read({
		projectId,
		id: assetId
	});
	return c.json(asset, 200);
});
var assets_default = router$3;
|
|
1615
|
+
|
|
1616
|
+
//#endregion
|
|
1617
|
+
//#region src/api/routes/content/v1/index.ts
|
|
1618
|
+
// v1 index router: mount every sub-router (Projects, Collections, Entries, Assets) under /projects
const router$2 = [
	projects_default,
	collections_default,
	entries_default,
	assets_default
].reduce((acc, subRouter) => acc.route("/projects", subRouter), createRouter());
var v1_default = router$2;
|
|
1620
|
+
|
|
1621
|
+
//#endregion
|
|
1622
|
+
//#region src/api/routes/content/index.ts
|
|
1623
|
+
// Content API index router: currently only version 1 is exposed, under /v1
const router$1 = createRouter().route("/v1", v1_default);
var content_default = router$1;
|
|
1625
|
+
|
|
1626
|
+
//#endregion
|
|
1627
|
+
//#region src/api/routes/index.ts
|
|
1628
|
+
// Root router of the local API: all content endpoints live under /content
const router = createRouter().route("/content", content_default);
var routes_default = router;
|
|
1630
|
+
|
|
1631
|
+
//#endregion
|
|
1632
|
+
//#region src/api/index.ts
|
|
1633
|
+
/**
 * Local, read-only HTTP API that serves a user's elek.io Projects.
 *
 * Wires the injected services into a Hono app, publishes the OpenAPI document
 * at /openapi.json and renders an interactive Scalar reference at /.
 * Call start(port) / stop() to control the underlying Node HTTP server.
 */
var LocalApi = class {
	logService;
	projectService;
	collectionService;
	entryService;
	assetService;
	// The configured Hono/OpenAPI app instance
	api;
	// Node HTTP server handle; null until start() is called
	server = null;
	constructor(logService, projectService, collectionService, entryService, assetService) {
		this.logService = logService;
		this.projectService = projectService;
		this.collectionService = collectionService;
		this.entryService = entryService;
		this.assetService = assetService;
		// Build the app: mount all routes at /, then describe them via OpenAPI 3.0
		this.api = createApi(this.logService, this.projectService, this.collectionService, this.entryService, this.assetService).route("/", routes_default).doc("/openapi.json", {
			openapi: "3.0.0",
			externalDocs: { url: "https://elek.io/docs" },
			info: {
				version: "0.1.0",
				title: "elek.io local API",
				description: "This API allows reading content from local elek.io Projects. You can use this API for development and building static websites and applications locally."
			},
			servers: [{
				url: "http://localhost:{port}",
				description: "elek.io local API",
				variables: { port: {
					default: 31310,
					description: "The port specified in elek.io Clients user configuration"
				} }
			}],
			tags: [{
				name: "Content API v1",
				description: "Version 1 of the elek.io content API lets you read Projects, Collections, Entries and Assets. \n### Resources\n - [Projects](https://elek.io/docs/projects)\n - [Collections](https://elek.io/docs/collections)\n - [Entries](https://elek.io/docs/entries)\n - [Assets](https://elek.io/docs/assets)"
			}]
		});
		// Serve the interactive API reference UI at the root path
		this.api.get("/", Scalar({
			pageTitle: "elek.io local API",
			url: "/openapi.json",
			theme: "kepler",
			layout: "modern",
			defaultHttpClient: {
				targetKey: "js",
				clientKey: "fetch"
			}
		}));
	}
	/**
	 * Starts the local API on given port
	 */
	start(port) {
		this.server = serve({
			fetch: this.api.fetch,
			port
		}, (info) => {
			// Callback runs once the server is listening
			this.logService.info({
				source: "core",
				message: `Started local API on http://localhost:${info.port}`
			});
		});
	}
	/**
	 * Stops the local API
	 */
	stop() {
		// No-op if start() was never called (server is still null)
		this.server?.close(() => {
			this.logService.info({
				source: "core",
				message: "Stopped local API"
			});
		});
	}
	/**
	 * Returns true if the local API is running
	 */
	isRunning() {
		// Relies on the server's own listening flag; false before start() or after close
		if (this.server?.listening) return true;
		return false;
	}
};
|
|
1712
|
+
|
|
1713
|
+
//#endregion
|
|
1714
|
+
//#region src/error/GitError.ts
|
|
1715
|
+
/** Error raised when an underlying git operation fails. */
var GitError = class extends Error {
	// Class field sets the own `name` property, equivalent to assigning it in a constructor
	name = "GitError";
};
|
|
1721
|
+
|
|
1722
|
+
//#endregion
|
|
1723
|
+
//#region src/error/NoCurrentUserError.ts
|
|
1724
|
+
/** Thrown when an operation requires a current User but none has been set yet. */
var NoCurrentUserError = class extends Error {
	name = "NoCurrentUserError";
	constructor() {
		super("Make sure to set a User via Core before using other methods");
	}
};
|
|
1730
|
+
|
|
1731
|
+
//#endregion
|
|
1732
|
+
//#region src/error/ProjectUpgradeError.ts
|
|
1733
|
+
/** Error raised when upgrading a Project to a newer version fails. */
var ProjectUpgradeError = class extends Error {
	name = "ProjectUpgradeError";
};
|
|
1739
|
+
|
|
1740
|
+
//#endregion
|
|
1741
|
+
//#region src/error/RequiredParameterMissingError.ts
|
|
1742
|
+
/** Thrown when a method is called without a parameter it requires. */
var RequiredParameterMissingError = class extends Error {
	name = "RequiredParameterMissingError";
	constructor(parameter) {
		super(`Missing required parameter "${parameter}"`);
	}
};
|
|
1748
|
+
|
|
1749
|
+
//#endregion
|
|
1750
|
+
//#region src/util/node.ts
|
|
1751
|
+
// Namespace object for the Node-only utilities (src/util/node.ts).
// __export is a bundler-generated helper defined earlier in the file —
// presumably it defines a getter per listed member; verify against the helper if needed.
var node_exports = /* @__PURE__ */ __export({
	execCommand: () => execCommand,
	files: () => files,
	folders: () => folders,
	isNotAnError: () => isNotAnError,
	isNotEmpty: () => isNotEmpty,
	pathTo: () => pathTo,
	workingDirectory: () => workingDirectory
});
|
|
1760
|
+
/**
 * The directory in which everything is stored and will be worked in
 *
 * @todo make the workingDirectory an elek option to be set via app.getPath('home') (electron instead of node)?
 */
const workingDirectory = Path.join(Os.homedir(), "elek.io"); // e.g. ~/elek.io
|
|
1766
|
+
/**
 * A collection of often used paths
 *
 * All paths are derived from `workingDirectory`; functions build
 * per-Project / per-object paths from IDs.
 */
const pathTo = {
	tmp: Path.join(workingDirectory, "tmp"),
	userFile: Path.join(workingDirectory, "user.json"),
	logs: Path.join(workingDirectory, "logs"),
	projects: Path.join(workingDirectory, "projects"),
	// One folder per Project, named by its ID
	project: (projectId) => Path.join(pathTo.projects, projectId),
	projectFile: (projectId) => Path.join(pathTo.project(projectId), "project.json"),
	lfs: (projectId) => Path.join(pathTo.project(projectId), projectFolderSchema.enum.lfs),
	collections: (projectId) => Path.join(pathTo.project(projectId), projectFolderSchema.enum.collections),
	collection: (projectId, id) => Path.join(pathTo.collections(projectId), id),
	collectionFile: (projectId, id) => Path.join(pathTo.collection(projectId, id), "collection.json"),
	// Entries live directly inside their Collection's folder
	entries: (projectId, collectionId) => Path.join(pathTo.collection(projectId, collectionId)),
	entryFile: (projectId, collectionId, id) => Path.join(pathTo.entries(projectId, collectionId), `${id}.json`),
	sharedValues: (projectId) => Path.join(pathTo.project(projectId), "shared-values"),
	sharedValueFile: (projectId, id, language) => Path.join(pathTo.sharedValues(projectId), `${id}.${language}.json`),
	assets: (projectId) => Path.join(pathTo.project(projectId), projectFolderSchema.enum.assets),
	assetFile: (projectId, id) => Path.join(pathTo.assets(projectId), `${id}.json`),
	// The Asset's binary sits in the LFS folder, named by ID and extension
	asset: (projectId, id, extension) => Path.join(pathTo.lfs(projectId), `${id}.${extension}`),
	// Historic Asset binaries checked out from git are parked in tmp
	tmpAsset: (id, commitHash, extension) => Path.join(pathTo.tmp, `${id}.${commitHash}.${extension}`)
};
|
|
1817
|
+
/**
 * Used as parameter for filter() methods to assure,
 * only values not null, undefined or empty strings are returned
 *
 * @param value Value to check
 */
function isNotEmpty(value) {
	// `== null` intentionally matches both null and undefined
	if (value == null) return false;
	if (typeof value === "string" && value.trim() === "") return false;
	return true;
}
|
|
1830
|
+
/**
 * Used as parameter for filter() methods to assure,
 * only items that are not of type Error are returned
 *
 * @param item Item to check
 */
function isNotAnError(item) {
	return !(item instanceof Error);
}
|
|
1839
|
+
/**
 * Returns all folders of given path to a directory
 */
async function folders(path) {
	const entries = await Fs.readdir(path, { withFileTypes: true });
	return entries.filter((entry) => entry.isDirectory());
}
|
|
1847
|
+
/**
 * Returns all files of given path to a directory,
 * which can be filtered by extension
 */
async function files(path, extension) {
	const entries = await Fs.readdir(path, { withFileTypes: true });
	return entries.filter((entry) => {
		if (!entry.isFile()) return false;
		// Truthiness check on purpose: an empty-string extension means "no filter"
		if (extension) return entry.name.endsWith(extension);
		return true;
	});
}
|
|
1860
|
+
/**
 * Executes a shell command async and returns the output.
 *
 * When on Windows, it will automatically append `.cmd` to the command if it is in the `commandsToSuffix` list.
 */
function execCommand({ command, args, options, logger }) {
	// new Promise is used to adapt execFile's callback API
	return new Promise((resolve, reject) => {
		const commandsToSuffix = ["pnpm"];
		let suffixedCommand = command;
		if (Os.platform() === "win32") {
			suffixedCommand = command.split(" ").map((part) => commandsToSuffix.includes(part) ? `${part}.cmd` : part).join(" ");
		}
		// Only used for log messages
		const fullCommand = `${suffixedCommand} ${args.join(" ")}`;
		const execOptions = {
			...options,
			shell: true
		};
		const start = Date.now();
		execFile(suffixedCommand, args, execOptions, (error, stdout, stderr) => {
			const durationMs = Date.now() - start;
			const output = {
				stdout: stdout.toString(),
				stderr: stderr.toString()
			};
			if (error) {
				logger.error({
					source: "core",
					message: `Error executing command "${fullCommand}" after ${durationMs}ms: ${error.message}`,
					meta: {
						error,
						...output
					}
				});
				reject(error instanceof Error ? error : new Error(error.message));
				return;
			}
			logger.info({
				source: "core",
				message: `Command "${fullCommand}" executed successfully in ${durationMs}ms.`,
				meta: output
			});
			resolve(output);
		});
	});
}
|
|
1905
|
+
|
|
1906
|
+
//#endregion
|
|
1907
|
+
//#region src/service/AbstractCrudService.ts
|
|
1908
|
+
/**
 * A base service that provides properties for most other services
 *
 * Offers shared helpers for resolving many promises tolerantly and for
 * enumerating file/folder references of a Project's objects on disk.
 */
var AbstractCrudService = class {
	// Service type (one of serviceTypeSchema's values), set by the subclass
	type;
	options;
	logService;
	/**
	 * Do not instantiate directly as this is an abstract class
	 */
	constructor(type$1, options, logService) {
		this.type = type$1;
		this.options = options;
		this.logService = logService;
	}
	/**
	 * Basically a Promise.all() without rejecting if one promise fails to resolve
	 *
	 * Rejected promises are logged as warnings and filtered out of the result.
	 */
	async returnResolved(promises) {
		const toCheck = [];
		for (let index = 0; index < promises.length; index++) {
			const promise = promises[index];
			// Guard against sparse arrays / holes
			if (!promise) throw new Error(`No promise found at index "${index}"`);
			toCheck.push(promise.then((result) => {
				return result;
			}).catch((error) => {
				// Normalize non-Error rejections so they can be filtered below
				const actualError = error instanceof Error ? error : new Error(String(error));
				this.logService.warn({
					source: "core",
					message: `Function "returnResolved" catched an error while resolving a promise: ${actualError.message}`,
					meta: {
						error: actualError,
						promise
					}
				});
				// Returned (not thrown) so Promise.all below never rejects
				return actualError;
			}));
		}
		// Drop the error placeholders, keeping only successfully resolved values
		return (await Promise.all(toCheck)).filter(isNotAnError);
	}
	/**
	 * Returns a list of all file references of given project and type
	 *
	 * @param type File type of the references wanted
	 * @param projectId Project to get all asset references from
	 * @param collectionId Only needed when requesting files of type "Entry"
	 */
	async listReferences(type$1, projectId, collectionId) {
		switch (type$1) {
			case objectTypeSchema.enum.asset:
				if (!projectId) throw new RequiredParameterMissingError("projectId");
				return this.getFileReferences(pathTo.lfs(projectId));
			// Projects are folders, not files
			case objectTypeSchema.enum.project: return this.getFolderReferences(pathTo.projects);
			case objectTypeSchema.enum.collection:
				if (!projectId) throw new RequiredParameterMissingError("projectId");
				return this.getFolderReferences(pathTo.collections(projectId));
			case objectTypeSchema.enum.entry:
				if (!projectId) throw new RequiredParameterMissingError("projectId");
				if (!collectionId) throw new RequiredParameterMissingError("collectionId");
				return this.getFileReferences(pathTo.collection(projectId, collectionId));
			case objectTypeSchema.enum.sharedValue:
				if (!projectId) throw new RequiredParameterMissingError("projectId");
				return this.getFileReferences(pathTo.sharedValues(projectId));
			default: throw new Error(`Trying to list files of unsupported type "${type$1}"`);
		}
	}
	// Lists subfolders of `path` whose names parse as valid references; others are logged and skipped
	async getFolderReferences(path) {
		return (await folders(path)).map((possibleFolder) => {
			const folderReference = { id: possibleFolder.name };
			try {
				return fileReferenceSchema.parse(folderReference);
			} catch {
				this.logService.warn({
					source: "core",
					message: `Function "getFolderReferences" is ignoring folder "${possibleFolder.name}" in "${path}" as it does not match the expected format`
				});
				return null;
			}
		}).filter(isNotEmpty);
	}
	/**
	 * Searches for all files inside given folder,
	 * parses their names and returns them as FileReference
	 *
	 * Ignores files if the extension is not supported.
	 */
	async getFileReferences(path) {
		return (await files(path)).map((possibleFile) => {
			// File names are expected to look like "<id>.<extension>"
			const fileNameArray = possibleFile.name.split(".");
			const fileReference = {
				id: fileNameArray[0],
				extension: fileNameArray[1]
			};
			try {
				return fileReferenceSchema.parse(fileReference);
			} catch {
				this.logService.warn({
					source: "core",
					message: `Function "getFileReferences" is ignoring file "${possibleFile.name}" in "${path}" as it does not match the expected format`
				});
				return null;
			}
		}).filter(isNotEmpty);
	}
};
|
|
2013
|
+
|
|
2014
|
+
//#endregion
|
|
2015
|
+
//#region src/util/shared.ts
|
|
2016
|
+
/**
|
|
2017
|
+
* Returns a new UUID
|
|
2018
|
+
*/
|
|
2019
|
+
function uuid() {
|
|
2020
|
+
return v4();
|
|
2021
|
+
}
|
|
2022
|
+
/**
 * Returns a string representing date and time
 * in a simplified format based on ISO 8601.
 * The timezone is always UTC.
 *
 * - If value is not given, the current date and time is used
 * - If value is given, it's converted to above representation and UTC timezone
 *
 * @example 'YYYY-MM-DDTHH:mm:ss.sssZ'
 *
 * @see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString
 * @see https://en.wikipedia.org/wiki/ISO_8601
 */
function datetime(value) {
	// Any falsy input (undefined, null, 0, "") means "now" — matches the original contract
	const date = value ? new Date(value) : new Date();
	return date.toISOString();
}
|
|
2039
|
+
/**
 * Returns the slug of given string
 *
 * Lowercase, dash-separated, with camelCase boundaries split.
 */
function slug(string) {
	const slugOptions = {
		separator: "-",
		lowercase: true,
		decamelize: true
	};
	return slugify(string, slugOptions);
}
|
|
2049
|
+
|
|
2050
|
+
//#endregion
|
|
2051
|
+
//#region src/service/AssetService.ts
|
|
2052
|
+
/**
 * Service that manages CRUD functionality for Asset files on disk
 *
 * An Asset consists of two files inside a Project's git repository: the
 * binary itself (stored in the LFS folder) and a JSON meta file describing it.
 * All mutations are committed to the Project's git history.
 */
var AssetService = class extends AbstractCrudService {
	jsonFileService;
	gitService;
	constructor(options, logService, jsonFileService, gitService) {
		super(serviceTypeSchema.enum.Asset, options, logService);
		this.jsonFileService = jsonFileService;
		this.gitService = gitService;
	}
	/**
	 * Creates a new Asset
	 *
	 * Copies the source file into the Project's LFS folder, writes the meta
	 * JSON file and commits both. On failure, partially written files are
	 * removed again before the error is rethrown.
	 */
	async create(props) {
		createAssetSchema.parse(props);
		const id = uuid();
		const projectPath = pathTo.project(props.projectId);
		const fileType = this.getFileType(props.filePath);
		const size = await this.getFileSize(props.filePath);
		const assetPath = pathTo.asset(props.projectId, id, fileType.extension);
		const assetFilePath = pathTo.assetFile(props.projectId, id);
		const assetFile = {
			...props,
			name: slug(props.name),
			objectType: "asset",
			id,
			created: datetime(),
			updated: null,
			extension: fileType.extension,
			mimeType: fileType.mimeType,
			size
		};
		try {
			await Fs.copyFile(props.filePath, assetPath);
			await this.jsonFileService.create(assetFile, assetFilePath, assetFileSchema);
		} catch (error) {
			// Roll back anything written so far, then surface the original error
			await this.delete({
				...assetFile,
				projectId: props.projectId
			});
			throw error;
		}
		await this.gitService.add(projectPath, [assetFilePath, assetPath]);
		await this.gitService.commit(projectPath, {
			method: "create",
			reference: {
				objectType: "asset",
				id
			}
		});
		return this.toAsset(props.projectId, assetFile);
	}
	/**
	 * Returns an Asset by ID
	 *
	 * If a commit hash is provided, the Asset is read from history: the meta
	 * file is migrated to the current schema and the historic binary is
	 * checked out into the tmp folder.
	 */
	async read(props) {
		readAssetSchema.parse(props);
		if (!props.commitHash) {
			const assetFile = await this.jsonFileService.read(pathTo.assetFile(props.projectId, props.id), assetFileSchema);
			return this.toAsset(props.projectId, assetFile);
		} else {
			const assetFile = this.migrate(JSON.parse(await this.gitService.getFileContentAtCommit(pathTo.project(props.projectId), pathTo.assetFile(props.projectId, props.id), props.commitHash)));
			const assetBlob = await this.gitService.getFileContentAtCommit(pathTo.project(props.projectId), pathTo.asset(props.projectId, props.id, assetFile.extension), props.commitHash, "binary");
			// Park the historic binary in tmp so toAsset can point at it
			await Fs.writeFile(pathTo.tmpAsset(assetFile.id, props.commitHash, assetFile.extension), assetBlob, "binary");
			return this.toAsset(props.projectId, assetFile, props.commitHash);
		}
	}
	/**
	 * Copies an Asset to given file path on disk
	 */
	async save(props) {
		saveAssetSchema.parse(props);
		const asset = await this.read(props);
		await Fs.copyFile(asset.absolutePath, props.filePath);
	}
	/**
	 * Updates given Asset
	 *
	 * Use the optional "newFilePath" prop to update the Asset itself
	 * (the old binary is removed and replaced by the new one).
	 */
	async update(props) {
		updateAssetSchema.parse(props);
		const projectPath = pathTo.project(props.projectId);
		const assetFilePath = pathTo.assetFile(props.projectId, props.id);
		const prevAssetFile = await this.read(props);
		const assetFile = {
			...prevAssetFile,
			...props,
			name: slug(props.name),
			updated: datetime()
		};
		if (props.newFilePath) {
			const fileType = this.getFileType(props.newFilePath);
			const size = await this.getFileSize(props.newFilePath);
			const prevAssetPath = pathTo.asset(props.projectId, props.id, prevAssetFile.extension);
			const assetPath = pathTo.asset(props.projectId, props.id, fileType.extension);
			await Fs.remove(prevAssetPath);
			await Fs.copyFile(props.newFilePath, assetPath);
			await this.gitService.add(projectPath, [prevAssetPath, assetPath]);
			assetFile.extension = fileType.extension;
			assetFile.mimeType = fileType.mimeType;
			assetFile.size = size;
		}
		await this.jsonFileService.update(assetFile, assetFilePath, assetFileSchema);
		await this.gitService.add(projectPath, [assetFilePath]);
		await this.gitService.commit(projectPath, {
			method: "update",
			reference: {
				objectType: "asset",
				id: assetFile.id
			}
		});
		return this.toAsset(props.projectId, assetFile);
	}
	/**
	 * Deletes given Asset
	 *
	 * Removes both the binary and the meta file, then commits the deletion.
	 */
	async delete(props) {
		deleteAssetSchema.parse(props);
		const projectPath = pathTo.project(props.projectId);
		const assetFilePath = pathTo.assetFile(props.projectId, props.id);
		const assetPath = pathTo.asset(props.projectId, props.id, props.extension);
		await Fs.remove(assetPath);
		await Fs.remove(assetFilePath);
		await this.gitService.add(projectPath, [assetFilePath, assetPath]);
		await this.gitService.commit(projectPath, {
			method: "delete",
			reference: {
				objectType: "asset",
				id: props.id
			}
		});
	}
	/**
	 * Returns a paginated list of the Project's Assets
	 *
	 * @param props.offset Number of Assets to skip (default 0)
	 * @param props.limit Maximum number of Assets to return (default 15)
	 */
	async list(props) {
		listAssetsSchema.parse(props);
		const offset = props.offset || 0;
		const limit = props.limit || 15;
		const assetReferences = await this.listReferences(objectTypeSchema.enum.asset, props.projectId);
		// slice() takes an END INDEX, not a count — the offset must be added to
		// the limit, otherwise every page after the first is truncated or empty
		// (was: assetReferences.slice(offset, limit))
		const partialAssetReferences = assetReferences.slice(offset, offset + limit);
		const assets = await this.returnResolved(partialAssetReferences.map((assetReference) => {
			return this.read({
				projectId: props.projectId,
				id: assetReference.id
			});
		}));
		return {
			total: assetReferences.length,
			limit,
			offset,
			list: assets
		};
	}
	/**
	 * Returns the total number of Assets of the given Project
	 */
	async count(props) {
		countAssetsSchema.parse(props);
		return (await this.listReferences(objectTypeSchema.enum.asset, props.projectId)).length;
	}
	/**
	 * Checks if given object is of type Asset
	 */
	isAsset(obj) {
		return assetSchema.safeParse(obj).success;
	}
	/**
	 * Returns the size of an file in bytes
	 *
	 * @param path Path of the file to get the size from
	 */
	async getFileSize(path) {
		return (await Fs.stat(path)).size;
	}
	/**
	 * Creates an Asset from given AssetFile
	 *
	 * @param projectId The project's ID
	 * @param assetFile The AssetFile to convert
	 * @param commitHash When given, the absolute path points at the tmp copy of the historic binary
	 */
	async toAsset(projectId, assetFile, commitHash) {
		const assetPath = commitHash ? pathTo.tmpAsset(assetFile.id, commitHash, assetFile.extension) : pathTo.asset(projectId, assetFile.id, assetFile.extension);
		const history = await this.gitService.log(pathTo.project(projectId), { filePath: pathTo.assetFile(projectId, assetFile.id) });
		return {
			...assetFile,
			absolutePath: assetPath,
			history
		};
	}
	/**
	 * Returns the found and supported extension as well as mime type,
	 * otherwise throws an error
	 *
	 * @param filePath Path to the file to check
	 * @throws Error when the MIME type or extension cannot be resolved
	 */
	getFileType(filePath) {
		const mimeType = mime.getType(filePath);
		if (mimeType === null) throw new Error(`Unsupported MIME type of file "${filePath}"`);
		const extension = mime.getExtension(mimeType);
		if (extension === null) throw new Error(`Unsupported extension for MIME type "${mimeType}" of file "${filePath}"`);
		return {
			extension,
			mimeType
		};
	}
	/**
	 * Migrates an potentially outdated Asset file to the current schema
	 */
	migrate(potentiallyOutdatedAssetFile) {
		return assetFileSchema.parse(potentiallyOutdatedAssetFile);
	}
};
|
|
2263
|
+
|
|
2264
|
+
//#endregion
|
|
2265
|
+
//#region src/service/CollectionService.ts
|
|
2266
|
+
/**
 * Service that manages CRUD functionality for Collection files on disk
 */
var CollectionService = class extends AbstractCrudService {
	jsonFileService;
	gitService;
	/**
	 * @param options Core options shared by all services
	 * @param logService Used for logging via the base class
	 * @param jsonFileService Reads and writes the Collection JSON files
	 * @param gitService Stages and commits changes to the project repository
	 */
	constructor(options, logService, jsonFileService, gitService) {
		super(serviceTypeSchema.enum.Collection, options, logService);
		this.jsonFileService = jsonFileService;
		this.gitService = gitService;
	}
	/**
	 * Creates a new Collection
	 *
	 * Writes the Collection file to disk, then stages and commits it.
	 */
	async create(props) {
		createCollectionSchema.parse(props);
		const id = uuid();
		const projectPath = pathTo.project(props.projectId);
		const collectionPath = pathTo.collection(props.projectId, id);
		const collectionFilePath = pathTo.collectionFile(props.projectId, id);
		const collectionFile = {
			...props,
			objectType: "collection",
			id,
			// Slugs are normalized so they are safe to use in URLs
			slug: {
				singular: slug(props.slug.singular),
				plural: slug(props.slug.plural)
			},
			created: datetime(),
			updated: null
		};
		await Fs.ensureDir(collectionPath);
		await this.jsonFileService.create(collectionFile, collectionFilePath, collectionFileSchema);
		await this.gitService.add(projectPath, [collectionFilePath]);
		await this.gitService.commit(projectPath, {
			method: "create",
			reference: {
				objectType: "collection",
				id
			}
		});
		return this.toCollection(props.projectId, collectionFile);
	}
	/**
	 * Returns a Collection by ID
	 *
	 * If a commit hash is provided, the Collection is read from history
	 */
	async read(props) {
		readCollectionSchema.parse(props);
		if (!props.commitHash) {
			const collectionFile = await this.jsonFileService.read(pathTo.collectionFile(props.projectId, props.id), collectionFileSchema);
			return this.toCollection(props.projectId, collectionFile);
		} else {
			// Historic reads go through git show and are migrated to the current schema
			const collectionFile = this.migrate(JSON.parse(await this.gitService.getFileContentAtCommit(pathTo.project(props.projectId), pathTo.collectionFile(props.projectId, props.id), props.commitHash)));
			return this.toCollection(props.projectId, collectionFile);
		}
	}
	/**
	 * Updates given Collection
	 *
	 * @todo finish implementing checks for FieldDefinitions and extract methods
	 *
	 * @param projectId Project ID of the collection to update
	 * @param collection Collection to write to disk
	 * @returns An object containing information about the actions needed to be taken,
	 * before given update can be executed or void if the update was executed successfully
	 */
	async update(props) {
		updateCollectionSchema.parse(props);
		const projectPath = pathTo.project(props.projectId);
		const collectionFilePath = pathTo.collectionFile(props.projectId, props.id);
		// Merge the current state with the requested changes; props wins
		const collectionFile = {
			...await this.read(props),
			...props,
			updated: datetime()
		};
		await this.jsonFileService.update(collectionFile, collectionFilePath, collectionFileSchema);
		await this.gitService.add(projectPath, [collectionFilePath]);
		await this.gitService.commit(projectPath, {
			method: "update",
			reference: {
				objectType: "collection",
				id: collectionFile.id
			}
		});
		return this.toCollection(props.projectId, collectionFile);
	}
	/**
	 * Deletes given Collection (folder), including it's items
	 *
	 * The Fields that Collection used are not deleted.
	 */
	async delete(props) {
		deleteCollectionSchema.parse(props);
		const projectPath = pathTo.project(props.projectId);
		const collectionPath = pathTo.collection(props.projectId, props.id);
		await Fs.remove(collectionPath);
		await this.gitService.add(projectPath, [collectionPath]);
		await this.gitService.commit(projectPath, {
			method: "delete",
			reference: {
				objectType: "collection",
				id: props.id
			}
		});
	}
	/**
	 * Returns a paginated list of Collections
	 */
	async list(props) {
		listCollectionsSchema.parse(props);
		const offset = props.offset || 0;
		const limit = props.limit || 15;
		const collectionReferences = await this.listReferences(objectTypeSchema.enum.collection, props.projectId);
		// Fix: Array.prototype.slice takes an END INDEX, not a count.
		// slice(offset, limit) returned too few (or zero) items for any
		// non-zero offset, e.g. slice(15, 15) is always empty.
		const partialCollectionReferences = collectionReferences.slice(offset, offset + limit);
		const collections = await this.returnResolved(partialCollectionReferences.map((reference) => {
			return this.read({
				projectId: props.projectId,
				id: reference.id
			});
		}));
		return {
			total: collectionReferences.length,
			limit,
			offset,
			list: collections
		};
	}
	/**
	 * Returns the total number of Collections inside given project
	 */
	async count(props) {
		countCollectionsSchema.parse(props);
		return (await this.listReferences(objectTypeSchema.enum.collection, props.projectId)).length;
	}
	/**
	 * Checks if given object is of type Collection
	 */
	isCollection(obj) {
		return collectionFileSchema.safeParse(obj).success;
	}
	/**
	 * Migrates a potentially outdated Collection file to the current schema
	 */
	migrate(potentiallyOutdatedCollectionFile) {
		return collectionFileSchema.parse(potentiallyOutdatedCollectionFile);
	}
	/**
	 * Creates a Collection from given CollectionFile
	 *
	 * @param projectId The project's ID
	 * @param collectionFile The CollectionFile to convert
	 */
	async toCollection(projectId, collectionFile) {
		const history = await this.gitService.log(pathTo.project(projectId), { filePath: pathTo.collectionFile(projectId, collectionFile.id) });
		return {
			...collectionFile,
			history
		};
	}
};
|
|
2422
|
+
|
|
2423
|
+
//#endregion
|
|
2424
|
+
//#region src/service/EntryService.ts
|
|
2425
|
+
/**
 * Service that manages CRUD functionality for Entry files on disk
 */
var EntryService = class extends AbstractCrudService {
	jsonFileService;
	gitService;
	collectionService;
	/**
	 * @param options Core options shared by all services
	 * @param logService Used for logging via the base class
	 * @param jsonFileService Reads and writes the Entry JSON files
	 * @param gitService Stages and commits changes to the project repository
	 * @param collectionService Used to resolve the Collection's FieldDefinitions
	 */
	constructor(options, logService, jsonFileService, gitService, collectionService) {
		super(serviceTypeSchema.enum.Entry, options, logService);
		this.jsonFileService = jsonFileService;
		this.gitService = gitService;
		this.collectionService = collectionService;
	}
	/**
	 * Creates a new Entry for given Collection
	 */
	async create(props) {
		createEntrySchema.parse(props);
		const id = uuid();
		const projectPath = pathTo.project(props.projectId);
		const entryFilePath = pathTo.entryFile(props.projectId, props.collectionId, id);
		const collection = await this.collectionService.read({
			projectId: props.projectId,
			id: props.collectionId
		});
		const entryFile = {
			objectType: "entry",
			id,
			values: props.values,
			created: datetime(),
			updated: null
		};
		const entry = await this.toEntry(props.projectId, props.collectionId, entryFile);
		// Validate the given values against the Collection's FieldDefinitions
		getCreateEntrySchemaFromFieldDefinitions(collection.fieldDefinitions).parse(props);
		await this.jsonFileService.create(entryFile, entryFilePath, entryFileSchema);
		await this.gitService.add(projectPath, [entryFilePath]);
		await this.gitService.commit(projectPath, {
			method: "create",
			reference: {
				objectType: "entry",
				id: entryFile.id,
				collectionId: props.collectionId
			}
		});
		return entry;
	}
	/**
	 * Returns an Entry from given Collection by ID
	 *
	 * If a commit hash is provided, the Entry is read from history
	 */
	async read(props) {
		readEntrySchema.parse(props);
		if (!props.commitHash) {
			const entryFile = await this.jsonFileService.read(pathTo.entryFile(props.projectId, props.collectionId, props.id), entryFileSchema);
			return this.toEntry(props.projectId, props.collectionId, entryFile);
		} else {
			// Historic reads go through git show and are migrated to the current schema
			const entryFile = this.migrate(JSON.parse(await this.gitService.getFileContentAtCommit(pathTo.project(props.projectId), pathTo.entryFile(props.projectId, props.collectionId, props.id), props.commitHash)));
			return this.toEntry(props.projectId, props.collectionId, entryFile);
		}
	}
	/**
	 * Updates an Entry of given Collection with new Values and shared Values
	 */
	async update(props) {
		updateEntrySchema.parse(props);
		const projectPath = pathTo.project(props.projectId);
		const entryFilePath = pathTo.entryFile(props.projectId, props.collectionId, props.id);
		const collection = await this.collectionService.read({
			projectId: props.projectId,
			id: props.collectionId
		});
		const entryFile = {
			...await this.read({
				projectId: props.projectId,
				collectionId: props.collectionId,
				id: props.id
			}),
			values: props.values,
			updated: datetime()
		};
		const entry = await this.toEntry(props.projectId, props.collectionId, entryFile);
		// Validate the given values against the Collection's FieldDefinitions
		getUpdateEntrySchemaFromFieldDefinitions(collection.fieldDefinitions).parse(props);
		await this.jsonFileService.update(entryFile, entryFilePath, entryFileSchema);
		await this.gitService.add(projectPath, [entryFilePath]);
		await this.gitService.commit(projectPath, {
			method: "update",
			reference: {
				objectType: "entry",
				id: entryFile.id,
				collectionId: props.collectionId
			}
		});
		return entry;
	}
	/**
	 * Deletes given Entry from it's Collection
	 */
	async delete(props) {
		deleteEntrySchema.parse(props);
		const projectPath = pathTo.project(props.projectId);
		const entryFilePath = pathTo.entryFile(props.projectId, props.collectionId, props.id);
		await Fs.remove(entryFilePath);
		await this.gitService.add(projectPath, [entryFilePath]);
		await this.gitService.commit(projectPath, {
			method: "delete",
			reference: {
				objectType: "entry",
				id: props.id,
				collectionId: props.collectionId
			}
		});
	}
	/**
	 * Returns a paginated list of Entries inside given Collection
	 */
	async list(props) {
		listEntriesSchema.parse(props);
		const offset = props.offset || 0;
		const limit = props.limit || 15;
		const entryReferences = await this.listReferences(objectTypeSchema.enum.entry, props.projectId, props.collectionId);
		// Fix: Array.prototype.slice takes an END INDEX, not a count.
		// slice(offset, limit) returned too few (or zero) items for any
		// non-zero offset, e.g. slice(15, 15) is always empty.
		const partialEntryReferences = entryReferences.slice(offset, offset + limit);
		const entries = await this.returnResolved(partialEntryReferences.map((reference) => {
			return this.read({
				projectId: props.projectId,
				collectionId: props.collectionId,
				id: reference.id
			});
		}));
		return {
			total: entryReferences.length,
			limit,
			offset,
			list: entries
		};
	}
	/**
	 * Returns the total number of Entries inside given Collection
	 */
	async count(props) {
		countEntriesSchema.parse(props);
		return (await this.listReferences(objectTypeSchema.enum.entry, props.projectId, props.collectionId)).length;
	}
	/**
	 * Checks if given object is of type Entry
	 */
	isEntry(obj) {
		return entrySchema.safeParse(obj).success;
	}
	/**
	 * Migrates a potentially outdated Entry file to the current schema
	 */
	migrate(potentiallyOutdatedEntryFile) {
		return entryFileSchema.parse(potentiallyOutdatedEntryFile);
	}
	/**
	 * Creates an Entry from given EntryFile by resolving it's Values
	 */
	async toEntry(projectId, collectionId, entryFile) {
		const history = await this.gitService.log(pathTo.project(projectId), { filePath: pathTo.entryFile(projectId, collectionId, entryFile.id) });
		return {
			...entryFile,
			history
		};
	}
};
|
|
2585
|
+
|
|
2586
|
+
//#endregion
|
|
2587
|
+
//#region src/service/GitTagService.ts
|
|
2588
|
+
/**
 * Service that manages CRUD functionality for GitTags
 */
var GitTagService = class extends AbstractCrudService {
	git;
	/**
	 * @param options Core options shared by all services
	 * @param git Bound function that executes git commands (provided by GitService)
	 * @param logService Used for logging via the base class
	 */
	constructor(options, git, logService) {
		super(serviceTypeSchema.enum.GitTag, options, logService);
		this.git = git;
	}
	/**
	 * Creates a new tag
	 *
	 * @see https://git-scm.com/docs/git-tag#Documentation/git-tag.txt---annotate
	 */
	async create(props) {
		createGitTagSchema.parse(props);
		const id = uuid();
		let args = [
			"tag",
			"--annotate",
			id
		];
		// Optionally tag a specific commit instead of HEAD
		if (props.hash) args = [...args, props.hash];
		args = [
			...args,
			"-m",
			props.message
		];
		await this.git(props.path, args);
		return await this.read({
			path: props.path,
			id
		});
	}
	/**
	 * Returns a tag by ID
	 *
	 * Internally uses list() but only returns the tag with matching ID.
	 */
	async read(props) {
		readGitTagSchema.parse(props);
		const tag = (await this.list({ path: props.path })).list.find((candidate) => {
			return candidate.id === props.id;
		});
		if (!tag) throw new GitError(`Provided tag with UUID "${props.id}" did not match any known tags`);
		return tag;
	}
	/**
	 * Updating a git tag is not supported.
	 * Please delete the old and create a new one
	 *
	 * @deprecated
	 * @see https://git-scm.com/docs/git-tag#_on_re_tagging
	 */
	update() {
		throw new Error("Updating a git tag is not supported. Please delete the old and create a new one");
	}
	/**
	 * Deletes a tag
	 *
	 * @see https://git-scm.com/docs/git-tag#Documentation/git-tag.txt---delete
	 *
	 * @param path Path to the repository
	 * @param id UUID of the tag to delete
	 */
	async delete(props) {
		deleteGitTagSchema.parse(props);
		const args = [
			"tag",
			"--delete",
			props.id
		];
		await this.git(props.path, args);
	}
	/**
	 * Gets all local tags or filter them by pattern
	 *
	 * They are sorted by authordate of the commit, not when the tag is created.
	 * This ensures tags are sorted correctly in the timeline of their commits.
	 *
	 * @see https://git-scm.com/docs/git-tag#Documentation/git-tag.txt---list
	 */
	async list(props) {
		listGitTagsSchema.parse(props);
		let args = ["tag", "--list"];
		args = [
			...args,
			"--sort=-*authordate",
			"--format=%(refname:short)|%(subject)|%(*authorname)|%(*authoremail)|%(*authordate:iso-strict)"
		];
		const gitTags = (await this.git(props.path, args)).stdout.split("\n").filter((line) => {
			return line.trim() !== "";
		}).map((line) => {
			const lineArray = line.split("|");
			// %(*authoremail) is wrapped in angle brackets ("<user@host>").
			// Fix: a single slice(1, -1) strips both brackets; the former
			// additional slice(0, -1) also chopped the last character off
			// the email address itself.
			if (lineArray[3]?.startsWith("<") && lineArray[3]?.endsWith(">")) {
				lineArray[3] = lineArray[3].slice(1, -1);
			}
			return {
				id: lineArray[0],
				message: lineArray[1],
				author: {
					name: lineArray[2],
					email: lineArray[3]
				},
				datetime: datetime(lineArray[4])
			};
		}).filter(this.isGitTag.bind(this));
		return {
			total: gitTags.length,
			limit: 0,
			offset: 0,
			list: gitTags
		};
	}
	/**
	 * Returns the total number of tags inside given repository
	 *
	 * Internally uses list(), so do not use count()
	 * in conjuncion with it to avoid multiple git calls.
	 *
	 * @param path Path to the repository
	 */
	async count(props) {
		countGitTagsSchema.parse(props);
		return (await this.list({ path: props.path })).total;
	}
	/**
	 * Type guard for GitTag
	 *
	 * @param obj The object to check
	 */
	isGitTag(obj) {
		return gitTagSchema.safeParse(obj).success;
	}
};
|
|
2724
|
+
|
|
2725
|
+
//#endregion
|
|
2726
|
+
//#region src/service/GitService.ts
|
|
2727
|
+
/**
|
|
2728
|
+
* Service that manages Git functionality
|
|
2729
|
+
*
|
|
2730
|
+
* Uses dugite Node.js bindings for Git to be fully compatible
|
|
2731
|
+
* and be able to leverage Git LFS functionality
|
|
2732
|
+
* @see https://github.com/desktop/dugite
|
|
2733
|
+
*
|
|
2734
|
+
* Heavily inspired by the GitHub Desktop app
|
|
2735
|
+
* @see https://github.com/desktop/desktop
|
|
2736
|
+
*
|
|
2737
|
+
* Git operations are sequential!
|
|
2738
|
+
* We use a FIFO queue to translate async calls
|
|
2739
|
+
* into a sequence of git operations
|
|
2740
|
+
*
|
|
2741
|
+
* @todo All public methods should recieve only a single object as parameter and the type should be defined through the shared library to be accessible in Core and Client
|
|
2742
|
+
*/
|
|
2743
|
+
var GitService = class {
|
|
2744
|
+
version;
|
|
2745
|
+
gitPath;
|
|
2746
|
+
queue;
|
|
2747
|
+
logService;
|
|
2748
|
+
gitTagService;
|
|
2749
|
+
userService;
|
|
2750
|
+
// Initializes the service and kicks off discovery of the git version and
// executable path in the background.
constructor(options, logService, userService) {
	this.version = null;
	this.gitPath = null;
	// Git operations must run sequentially; a concurrency-1 FIFO queue serializes them
	this.queue = new PQueue({ concurrency: 1 });
	// The tag service reuses this service's git executor (bound to keep `this`)
	this.gitTagService = new GitTagService(options, this.git.bind(this), logService);
	this.logService = logService;
	this.userService = userService;
	// NOTE(review): fire-and-forget — constructors cannot await, so `version`
	// and `gitPath` remain null until these promises resolve, and a rejection
	// here is unhandled. Consider a static async factory instead.
	this.updateVersion();
	this.updateGitPath();
}
|
|
2760
|
+
/**
 * CRUD methods to work with git tags
 *
 * Exposes the internal GitTagService instance created in the constructor.
 */
get tags() {
	return this.gitTagService;
}
|
|
2766
|
+
/**
|
|
2767
|
+
* Create an empty Git repository or reinitialize an existing one
|
|
2768
|
+
*
|
|
2769
|
+
* @see https://git-scm.com/docs/git-init
|
|
2770
|
+
*
|
|
2771
|
+
* @param path Path to initialize in. Fails if path does not exist
|
|
2772
|
+
* @param options Options specific to the init operation
|
|
2773
|
+
*/
|
|
2774
|
+
async init(path, options) {
|
|
2775
|
+
let args = ["init"];
|
|
2776
|
+
if (options?.initialBranch) args = [...args, `--initial-branch=${options.initialBranch}`];
|
|
2777
|
+
await this.git(path, args);
|
|
2778
|
+
await this.setLocalConfig(path);
|
|
2779
|
+
}
|
|
2780
|
+
/**
|
|
2781
|
+
* Clone a repository into a directory
|
|
2782
|
+
*
|
|
2783
|
+
* @see https://git-scm.com/docs/git-clone
|
|
2784
|
+
*
|
|
2785
|
+
* @todo Implement progress callback / events
|
|
2786
|
+
*
|
|
2787
|
+
* @param url The remote repository URL to clone from
|
|
2788
|
+
* @param path The destination path for the cloned repository.
|
|
2789
|
+
* Which is only working if the directory is existing and empty.
|
|
2790
|
+
* @param options Options specific to the clone operation
|
|
2791
|
+
*/
|
|
2792
|
+
async clone(url, path, options) {
|
|
2793
|
+
let args = ["clone", "--progress"];
|
|
2794
|
+
if (options?.bare) args = [...args, "--bare"];
|
|
2795
|
+
if (options?.branch) args = [
|
|
2796
|
+
...args,
|
|
2797
|
+
"--branch",
|
|
2798
|
+
options.branch
|
|
2799
|
+
];
|
|
2800
|
+
if (options?.depth) args = [
|
|
2801
|
+
...args,
|
|
2802
|
+
"--depth",
|
|
2803
|
+
options.depth.toString()
|
|
2804
|
+
];
|
|
2805
|
+
if (options?.singleBranch === true) args = [...args, "--single-branch"];
|
|
2806
|
+
await this.git("", [
|
|
2807
|
+
...args,
|
|
2808
|
+
url,
|
|
2809
|
+
path
|
|
2810
|
+
]);
|
|
2811
|
+
await this.setLocalConfig(path);
|
|
2812
|
+
}
|
|
2813
|
+
/**
|
|
2814
|
+
* Add file contents to the index
|
|
2815
|
+
*
|
|
2816
|
+
* @see https://git-scm.com/docs/git-add
|
|
2817
|
+
*
|
|
2818
|
+
* @param path Path to the repository
|
|
2819
|
+
* @param files Files to add
|
|
2820
|
+
*/
|
|
2821
|
+
async add(path, files$2) {
|
|
2822
|
+
const args = [
|
|
2823
|
+
"add",
|
|
2824
|
+
"--",
|
|
2825
|
+
...files$2.map((filePath) => {
|
|
2826
|
+
return filePath.replace(`${path}${Path.sep}`, "");
|
|
2827
|
+
})
|
|
2828
|
+
];
|
|
2829
|
+
await this.git(path, args);
|
|
2830
|
+
}
|
|
2831
|
+
async status(path) {
|
|
2832
|
+
return (await this.git(path, ["status", "--porcelain=2"])).stdout.split("\n").filter((line) => {
|
|
2833
|
+
return line.trim() !== "";
|
|
2834
|
+
}).map((line) => {
|
|
2835
|
+
return { filePath: line.trim().split(" ")[8] };
|
|
2836
|
+
});
|
|
2837
|
+
}
|
|
2838
|
+
/**
 * Methods to work with branches of a repository
 */
branches = {
	/**
	 * Lists all local and remote branches
	 */
	list: async (path) => {
		const { stdout } = await this.git(path, ["branch", "--list", "--all"]);
		const local = [];
		const remote = [];
		for (const rawLine of stdout.split("\n")) {
			// Strip the "* " marker git places in front of the current branch
			const branchName = rawLine.trim().replace("* ", "");
			if (branchName === "") continue;
			if (branchName.startsWith("remotes/")) {
				remote.push(branchName.replace("remotes/", ""));
			} else {
				local.push(branchName);
			}
		}
		return {
			local,
			remote
		};
	},
	/**
	 * Returns the name of the currently checked out branch
	 */
	current: async (path) => {
		const result = await this.git(path, ["branch", "--show-current"]);
		return result.stdout.trim();
	},
	/**
	 * Switches to given branch, optionally creating it first
	 */
	switch: async (path, branch, options) => {
		// Ensure the name is a valid git ref before using it
		await this.checkBranchOrTagName(path, branch);
		const args = ["switch"];
		if (options?.isNew === true) {
			args.push("--create");
		}
		args.push(branch);
		await this.git(path, args);
	},
	/**
	 * Deletes given branch; `force` deletes even if not fully merged
	 */
	delete: async (path, branch, force) => {
		const args = ["branch", "--delete"];
		if (force === true) {
			args.push("--force");
		}
		args.push(branch);
		await this.git(path, args);
	}
};
|
|
2880
|
+
/**
 * Methods to work with remotes of a repository
 */
remotes = {
	/**
	 * Lists the names of all configured remotes
	 */
	list: async (path) => {
		const { stdout } = await this.git(path, ["remote"]);
		return stdout.split("\n").filter((line) => {
			return line.trim() !== "";
		});
	},
	/**
	 * Returns true if a remote named "origin" is configured
	 */
	hasOrigin: async (path) => {
		const remoteNames = await this.remotes.list(path);
		return remoteNames.includes("origin");
	},
	/**
	 * Adds a remote named "origin" pointing to given URL
	 */
	addOrigin: async (path, url) => {
		await this.git(path, [
			"remote",
			"add",
			"origin",
			url.trim()
		]);
	},
	/**
	 * Returns the URL of the "origin" remote or null if none is set
	 */
	getOriginUrl: async (path) => {
		const { stdout } = await this.git(path, [
			"remote",
			"get-url",
			"origin"
		]);
		const url = stdout.trim();
		return url.length === 0 ? null : url;
	},
	/**
	 * Changes the URL of the "origin" remote
	 */
	setOriginUrl: async (path, url) => {
		await this.git(path, [
			"remote",
			"set-url",
			"origin",
			url.trim()
		]);
	}
};
|
|
2917
|
+
/**
|
|
2918
|
+
* Join two development histories together
|
|
2919
|
+
*
|
|
2920
|
+
* @see https://git-scm.com/docs/git-merge
|
|
2921
|
+
*/
|
|
2922
|
+
async merge(path, branch, options) {
|
|
2923
|
+
let args = ["merge"];
|
|
2924
|
+
if (options?.squash === true) args = [...args, "--squash"];
|
|
2925
|
+
args = [...args, branch];
|
|
2926
|
+
await this.git(path, args);
|
|
2927
|
+
}
|
|
2928
|
+
/**
|
|
2929
|
+
* Reset current HEAD to the specified state
|
|
2930
|
+
*
|
|
2931
|
+
* @todo maybe add more options
|
|
2932
|
+
* @see https://git-scm.com/docs/git-reset
|
|
2933
|
+
*
|
|
2934
|
+
* @param path Path to the repository
|
|
2935
|
+
* @param mode Modifies the working tree depending on given mode
|
|
2936
|
+
* @param commit Resets the current branch head to this commit / tag
|
|
2937
|
+
*/
|
|
2938
|
+
async reset(path, mode, commit) {
|
|
2939
|
+
const args = [
|
|
2940
|
+
"reset",
|
|
2941
|
+
`--${mode}`,
|
|
2942
|
+
commit
|
|
2943
|
+
];
|
|
2944
|
+
await this.git(path, args);
|
|
2945
|
+
}
|
|
2946
|
+
/**
|
|
2947
|
+
* Restore working tree files
|
|
2948
|
+
*
|
|
2949
|
+
* @see https://git-scm.com/docs/git-restore/
|
|
2950
|
+
*
|
|
2951
|
+
* @todo It's probably a good idea to not use restore
|
|
2952
|
+
* for a use case where someone just wants to have a look
|
|
2953
|
+
* and maybe copy something from a deleted file.
|
|
2954
|
+
* We should use `checkout` without `add .` and `commit` for that
|
|
2955
|
+
*
|
|
2956
|
+
* @param path Path to the repository
|
|
2957
|
+
* @param source Git commit SHA or tag name to restore to
|
|
2958
|
+
* @param files Files to restore
|
|
2959
|
+
*/
|
|
2960
|
+
/**
 * Download objects and refs from remote `origin`
 *
 * Does not modify the working tree; only updates remote-tracking refs.
 *
 * @see https://www.git-scm.com/docs/git-fetch
 *
 * @param path Path to the repository
 */
async fetch(path) {
	await this.git(path, ["fetch"]);
}
|
|
2970
|
+
/**
 * Fetch from and integrate (rebase or merge) with a local branch
 *
 * Uses the repository's configured default strategy (no flags passed).
 *
 * @see https://git-scm.com/docs/git-pull
 *
 * @param path Path to the repository
 */
async pull(path) {
	await this.git(path, ["pull"]);
}
|
|
2980
|
+
/**
|
|
2981
|
+
* Update remote refs along with associated objects to remote `origin`
|
|
2982
|
+
*
|
|
2983
|
+
* @see https://git-scm.com/docs/git-push
|
|
2984
|
+
*
|
|
2985
|
+
* @param path Path to the repository
|
|
2986
|
+
*/
|
|
2987
|
+
async push(path, options) {
|
|
2988
|
+
let args = ["push", "origin"];
|
|
2989
|
+
if (options?.all === true) args = [...args, "--all"];
|
|
2990
|
+
if (options?.force === true) args = [...args, "--force"];
|
|
2991
|
+
await this.git(path, args);
|
|
2992
|
+
}
|
|
2993
|
+
/**
|
|
2994
|
+
* Record changes to the repository
|
|
2995
|
+
*
|
|
2996
|
+
* @see https://git-scm.com/docs/git-commit
|
|
2997
|
+
*
|
|
2998
|
+
* @param path Path to the repository
|
|
2999
|
+
* @param message An object describing the changes
|
|
3000
|
+
*/
|
|
3001
|
+
async commit(path, message) {
|
|
3002
|
+
gitMessageSchema.parse(message);
|
|
3003
|
+
const user = await this.userService.get();
|
|
3004
|
+
if (!user) throw new NoCurrentUserError();
|
|
3005
|
+
const args = [
|
|
3006
|
+
"commit",
|
|
3007
|
+
`--message=${JSON.stringify(message)}`,
|
|
3008
|
+
`--author=${user.name} <${user.email}>`
|
|
3009
|
+
];
|
|
3010
|
+
await this.git(path, args);
|
|
3011
|
+
}
|
|
3012
|
+
/**
 * Gets local commit history
 *
 * @see https://git-scm.com/docs/git-log
 *
 * @todo Check if there is a need to trim the git commit message of chars
 * @todo Use this method in a service. Decide if we need a HistoryService for example
 *
 * @param path Path to the repository
 * @param options Options specific to the log operation
 */
async log(path, options) {
	let args = ["log"];
	// Restrict to a commit range; an open-ended range defaults to HEAD
	if (options?.between?.from) args = [...args, `${options.between.from}..${options.between.to || "HEAD"}`];
	if (options?.limit) args = [...args, `--max-count=${options.limit}`];
	// hash | subject | author name | author email | author date (strict ISO) | ref names
	args = [...args, "--format=%H|%s|%an|%ae|%aI|%D"];
	// "--" separates the optional path filter from the revision arguments
	if (options?.filePath) args = [
		...args,
		"--",
		options.filePath
	];
	const noEmptyLinesArr = (await this.git(path, args)).stdout.split("\n").filter((line) => {
		return line.trim() !== "";
	});
	return (await Promise.all(noEmptyLinesArr.map(async (line) => {
		const lineArray = line.split("|");
		// %D may contain a tag decoration; resolve it to a full GitTag if so
		const tagId = this.refNameToTagName(lineArray[5] || "");
		const tag = tagId ? await this.tags.read({
			path,
			id: tagId
		}) : null;
		return {
			hash: lineArray[0],
			// Subjects are JSON-serialized message objects (see commit()).
			// NOTE(review): JSON.parse throws on commits not created through
			// this service — confirm such repositories cannot occur here.
			message: JSON.parse(lineArray[1] || ""),
			author: {
				name: lineArray[2],
				email: lineArray[3]
			},
			datetime: datetime(lineArray[4]),
			tag
		};
	}))).filter(this.isGitCommit.bind(this));
}
|
|
3055
|
+
/**
|
|
3056
|
+
* Retrieves the content of a file at a specific commit
|
|
3057
|
+
*
|
|
3058
|
+
* @see https://git-scm.com/docs/git-show
|
|
3059
|
+
*/
|
|
3060
|
+
async getFileContentAtCommit(path, filePath, commitHash, encoding = "utf8") {
|
|
3061
|
+
const args = ["show", `${commitHash}:${filePath.replace(`${path}${Path.sep}`, "").split("\\").join("/")}`];
|
|
3062
|
+
const setEncoding = (cb) => {
|
|
3063
|
+
if (cb.stdout) cb.stdout.setEncoding(encoding);
|
|
3064
|
+
};
|
|
3065
|
+
return (await this.git(path, args, { processCallback: setEncoding })).stdout;
|
|
3066
|
+
}
|
|
3067
|
+
refNameToTagName(refName) {
|
|
3068
|
+
const tagName = refName.replace("tag: ", "").trim();
|
|
3069
|
+
if (tagName === "" || uuidSchema.safeParse(tagName).success === false) return null;
|
|
3070
|
+
return tagName;
|
|
3071
|
+
}
|
|
3072
|
+
/**
|
|
3073
|
+
* Reads the currently used version of Git
|
|
3074
|
+
*
|
|
3075
|
+
* This can help debugging
|
|
3076
|
+
*/
|
|
3077
|
+
async updateVersion() {
|
|
3078
|
+
this.version = (await this.git("", ["--version"])).stdout.replace("git version", "").trim();
|
|
3079
|
+
}
|
|
3080
|
+
/**
|
|
3081
|
+
* Reads the path to the executable of Git that is used
|
|
3082
|
+
*
|
|
3083
|
+
* This can help debugging, since dugite is shipping their own executable
|
|
3084
|
+
* but in some cases resolves another executable
|
|
3085
|
+
* @see https://github.com/desktop/dugite/blob/main/lib/git-environment.ts
|
|
3086
|
+
*/
|
|
3087
|
+
async updateGitPath() {
|
|
3088
|
+
this.gitPath = (await this.git("", ["--exec-path"])).stdout.trim();
|
|
3089
|
+
}
|
|
3090
|
+
/**
|
|
3091
|
+
* A reference is used in Git to specify branches and tags.
|
|
3092
|
+
* This method checks if given name matches the required format
|
|
3093
|
+
*
|
|
3094
|
+
* @see https://git-scm.com/docs/git-check-ref-format
|
|
3095
|
+
*
|
|
3096
|
+
* @param path Path to the repository
|
|
3097
|
+
* @param name Name to check
|
|
3098
|
+
*/
|
|
3099
|
+
async checkBranchOrTagName(path, name$1) {
|
|
3100
|
+
await this.git(path, [
|
|
3101
|
+
"check-ref-format",
|
|
3102
|
+
"--allow-onelevel",
|
|
3103
|
+
name$1
|
|
3104
|
+
]);
|
|
3105
|
+
}
|
|
3106
|
+
/**
|
|
3107
|
+
* Sets the git config of given local repository from ElekIoCoreOptions
|
|
3108
|
+
*
|
|
3109
|
+
* @param path Path to the repository
|
|
3110
|
+
*/
|
|
3111
|
+
async setLocalConfig(path) {
|
|
3112
|
+
const user = await this.userService.get();
|
|
3113
|
+
if (!user) throw new NoCurrentUserError();
|
|
3114
|
+
const userNameArgs = [
|
|
3115
|
+
"config",
|
|
3116
|
+
"--local",
|
|
3117
|
+
"user.name",
|
|
3118
|
+
user.name
|
|
3119
|
+
];
|
|
3120
|
+
const userEmailArgs = [
|
|
3121
|
+
"config",
|
|
3122
|
+
"--local",
|
|
3123
|
+
"user.email",
|
|
3124
|
+
user.email
|
|
3125
|
+
];
|
|
3126
|
+
const autoSetupRemoteArgs = [
|
|
3127
|
+
"config",
|
|
3128
|
+
"--local",
|
|
3129
|
+
"push.autoSetupRemote",
|
|
3130
|
+
"true"
|
|
3131
|
+
];
|
|
3132
|
+
const pullRebaseArgs = [
|
|
3133
|
+
"config",
|
|
3134
|
+
"--local",
|
|
3135
|
+
"pull.rebase",
|
|
3136
|
+
"true"
|
|
3137
|
+
];
|
|
3138
|
+
await this.git(path, userNameArgs);
|
|
3139
|
+
await this.git(path, userEmailArgs);
|
|
3140
|
+
await this.git(path, autoSetupRemoteArgs);
|
|
3141
|
+
await this.git(path, pullRebaseArgs);
|
|
3142
|
+
}
|
|
3143
|
+
/**
|
|
3144
|
+
* Type guard for GitCommit
|
|
3145
|
+
*
|
|
3146
|
+
* @param obj The object to check
|
|
3147
|
+
*/
|
|
3148
|
+
isGitCommit(obj) {
|
|
3149
|
+
return gitCommitSchema.safeParse(obj).success;
|
|
3150
|
+
}
|
|
3151
|
+
/**
 * Wraps the execution of any git command
 * to use a FIFO queue for sequential processing
 *
 * @param path Path to the repository
 * @param args Arguments to append after the `git` command
 * @param options Options passed through to dugite's GitProcess.exec
 * @returns The raw git result (stdout, stderr, exitCode)
 * @throws GitError when no result is returned or the command exits non-zero
 */
async git(path, args, options) {
	// Serialize all git invocations through the queue so commands never run
	// concurrently against the same repository; measure wall-clock duration.
	const result = await this.queue.add(async () => {
		const start = Date.now();
		return {
			gitResult: await GitProcess.exec(args, path, options),
			durationMs: Date.now() - start
		};
	});
	// NOTE(review): p-queue's add() may resolve to undefined in its typings — guard for it
	if (!result) throw new GitError(`Git ${this.version} (${this.gitPath}) command "git ${args.join(" ")}" executed for "${path}" failed to return a result`);
	const gitLog = {
		source: "core",
		message: `Executed "git ${args.join(" ")}" in ${result.durationMs}ms`,
		meta: { command: `git ${args.join(" ")}` }
	};
	// Slow commands (>= 100ms) are surfaced as warnings to aid debugging
	if (result.durationMs >= 100) this.logService.warn(gitLog);
	else this.logService.debug(gitLog);
	// Non-zero exit: wrap stderr (or stdout as fallback) into a GitError
	if (result.gitResult.exitCode !== 0) throw new GitError(`Git ${this.version} (${this.gitPath}) command "git ${args.join(" ")}" executed for "${path}" failed with exit code "${result.gitResult.exitCode}" and message "${result.gitResult.stderr.trim() || result.gitResult.stdout.trim()}"`);
	return result.gitResult;
}
|
|
3177
|
+
};
|
|
3178
|
+
|
|
3179
|
+
//#endregion
|
|
3180
|
+
//#region src/service/JsonFileService.ts
|
|
3181
|
+
/**
 * Service that manages CRUD functionality for JSON files on disk
 */
var JsonFileService = class extends AbstractCrudService {
	// In-memory cache of parsed file content, keyed by file path.
	// Only consulted/written when `options.file.cache` is enabled.
	cache = /* @__PURE__ */ new Map();
	constructor(options, logService) {
		super(serviceTypeSchema.enum.JsonFile, options, logService);
	}
	/**
	 * Creates a new file on disk. Fails if path already exists
	 *
	 * @param data Data to write into the file
	 * @param path Path to write the file to
	 * @param schema Schema of the file to validate against
	 * @returns Validated content of the file from disk
	 */
	async create(data, path, schema) {
		const parsedData = schema.parse(data);
		const string = this.serialize(parsedData);
		// Flag "wx" fails if the file already exists instead of overwriting it
		await Fs.writeFile(path, string, {
			flag: "wx",
			encoding: "utf8"
		});
		if (this.options.file.cache === true) this.cache.set(path, parsedData);
		this.logService.debug({
			source: "core",
			message: `Created file "${path}"`
		});
		return parsedData;
	}
	/**
	 * Reads the content of a file on disk. Fails if path does not exist
	 *
	 * Serves from the in-memory cache when enabled and populated; cached
	 * values are still re-validated against the given schema.
	 *
	 * @param path Path to read the file from
	 * @param schema Schema of the file to validate against
	 * @returns Validated content of the file from disk
	 */
	async read(path, schema) {
		if (this.options.file.cache === true && this.cache.has(path)) {
			this.logService.debug({
				source: "core",
				message: `Cache hit reading file "${path}"`
			});
			const json$1 = this.cache.get(path);
			return schema.parse(json$1);
		}
		this.logService.debug({
			source: "core",
			message: `Cache miss reading file "${path}"`
		});
		const data = await Fs.readFile(path, {
			flag: "r",
			encoding: "utf8"
		});
		const json = this.deserialize(data);
		const parsedData = schema.parse(json);
		// Populate the cache so subsequent reads of this path are served from memory
		if (this.options.file.cache === true) this.cache.set(path, parsedData);
		return parsedData;
	}
	/**
	 * Reads the content of a file on disk. Fails if path does not exist.
	 * Does not validate the content of the file against a schema and
	 * therefore is only to be used when retrieving data we do not have
	 * a current schema for. E.g. reading from history or while upgrading
	 * the old schema of a file to a new, current schema.
	 *
	 * Does not read from or write to cache.
	 *
	 * @param path Path to read the file from
	 * @returns Unvalidated content of the file from disk
	 */
	async unsafeRead(path) {
		// Always logged as a warning, since skipping validation is exceptional
		this.logService.warn({
			source: "core",
			message: `Unsafe reading of file "${path}"`
		});
		const data = await Fs.readFile(path, {
			flag: "r",
			encoding: "utf8"
		});
		return this.deserialize(data);
	}
	/**
	 * Overwrites an existing file on disk
	 *
	 * @todo Check how to error out if the file does not exist already
	 *
	 * @param data Data to write into the file
	 * @param path Path to the file to overwrite
	 * @param schema Schema of the file to validate against
	 * @returns Validated content of the file from disk
	 */
	async update(data, path, schema) {
		const parsedData = schema.parse(data);
		const string = this.serialize(parsedData);
		// Flag "w" creates or truncates — note the @todo above: this will also
		// create a file that did not exist before
		await Fs.writeFile(path, string, {
			flag: "w",
			encoding: "utf8"
		});
		if (this.options.file.cache === true) this.cache.set(path, parsedData);
		this.logService.debug({
			source: "core",
			message: `Updated file "${path}"`
		});
		return parsedData;
	}
	// Serializes data as pretty-printed (2-space indented) JSON
	serialize(data) {
		return JSON.stringify(data, null, 2);
	}
	// Parses a JSON string back into a value; throws SyntaxError on invalid JSON
	deserialize(data) {
		return JSON.parse(data);
	}
};
|
|
3294
|
+
|
|
3295
|
+
//#endregion
|
|
3296
|
+
//#region src/service/LogService.ts
|
|
3297
|
+
/**
 * Service that handles logging to file and console
 */
var LogService = class {
	logger;
	constructor(options) {
		// File transport: daily rotated, gzipped JSON logs kept for 30 days;
		// also captures uncaught exceptions and unhandled rejections.
		const rotatingFileTransport = new DailyRotateFile({
			dirname: pathTo.logs,
			filename: "%DATE%.log",
			datePattern: "YYYY-MM-DD",
			zippedArchive: true,
			maxFiles: "30d",
			handleExceptions: true,
			handleRejections: true,
			format: format.combine(format.timestamp(), format.json())
		});
		rotatingFileTransport.on("rotate", (oldFilename, newFilename) => {
			this.info({
				message: `Rotated log file from ${oldFilename} to ${newFilename}`,
				source: "core"
			});
		});
		rotatingFileTransport.on("error", (error) => {
			this.error({
				message: `Error rotating log file: ${error.message}`,
				source: "core",
				meta: { error }
			});
		});
		const consoleTransport = new transports.Console({
			handleExceptions: true,
			handleRejections: true,
			format: format.combine(format.colorize(), format.timestamp({ format: "HH:mm:ss" }), format.printf((props) => {
				// winston stores extra arguments under the "splat" symbol; assumes
				// the first splat entry is the { source, meta } object passed by the
				// level methods below — TODO confirm this holds for external callers
				const { timestamp, level, source, message } = logConsoleTransportSchema.parse({
					...props[Symbol.for("splat")][0],
					timestamp: props["timestamp"],
					level: props.level,
					message: props.message
				});
				return `${timestamp} [${source}] ${level}: ${message}`;
			}))
		});
		this.logger = createLogger({
			level: options.log.level,
			transports: [rotatingFileTransport, consoleTransport]
		});
	}
	// Each level method validates the given props against logSchema and then
	// forwards message plus { source, meta } to the underlying winston logger.
	debug(props) {
		const { source, message, meta } = logSchema.parse(props);
		this.logger.debug(message, {
			source,
			meta
		});
	}
	info(props) {
		const { source, message, meta } = logSchema.parse(props);
		this.logger.info(message, {
			source,
			meta
		});
	}
	warn(props) {
		const { source, message, meta } = logSchema.parse(props);
		this.logger.warn(message, {
			source,
			meta
		});
	}
	error(props) {
		const { source, message, meta } = logSchema.parse(props);
		this.logger.error(message, {
			source,
			meta
		});
	}
};
|
|
3373
|
+
|
|
3374
|
+
//#endregion
|
|
3375
|
+
//#region src/error/RemoteOriginMissingError.ts
|
|
3376
|
+
var RemoteOriginMissingError = class extends Error {
	/**
	 * Thrown when a Project without a configured remote origin is about to be
	 * deleted without the "force" option
	 *
	 * @param projectId ID of the Project that was about to be deleted
	 */
	constructor(projectId) {
		const message = `Tried to delete Project "${projectId}" but it does not have a remote origin. Deleting a Project without a remote origin could lead to data loss. Use the "force" option to delete it anyway.`;
		super(message);
		this.name = "RemoteOriginMissingError";
	}
};
|
|
3382
|
+
|
|
3383
|
+
//#endregion
|
|
3384
|
+
//#region src/error/SynchronizeLocalChangesError.ts
|
|
3385
|
+
var SynchronizeLocalChangesError = class extends Error {
	/**
	 * Thrown when a Project with unpushed local commits is about to be deleted
	 * without the "force" option
	 *
	 * @param projectId ID of the Project that was about to be deleted
	 */
	constructor(projectId) {
		const message = `Tried to delete Project "${projectId}" but it has local changes that are not yet pushed to the remote origin. Deleting a Project with local changes could lead to data loss. Use the "force" option to delete it anyway.`;
		super(message);
		this.name = "SynchronizeLocalChangesError";
	}
};
|
|
3391
|
+
|
|
3392
|
+
//#endregion
|
|
3393
|
+
//#region src/service/ProjectService.ts
|
|
3394
|
+
/**
 * Service that manages CRUD functionality for Project files on disk
 */
var ProjectService = class extends AbstractCrudService {
	coreVersion;
	jsonFileService;
	userService;
	gitService;
	assetService;
	collectionService;
	entryService;
	constructor(coreVersion, options, logService, jsonFileService, userService, gitService, assetService, collectionService, entryService) {
		super(serviceTypeSchema.enum.Project, options, logService);
		this.coreVersion = coreVersion;
		this.jsonFileService = jsonFileService;
		this.userService = userService;
		this.gitService = gitService;
		this.assetService = assetService;
		this.collectionService = collectionService;
		this.entryService = entryService;
	}
	/**
	 * Creates a new Project
	 *
	 * Initializes the git repository on the "production" branch, commits the
	 * initial structure and then switches to a new "work" branch. On any
	 * failure the partially created Project folder is force-deleted again.
	 *
	 * @throws NoCurrentUserError when no User is set
	 */
	async create(props) {
		createProjectSchema.parse(props);
		const user = await this.userService.get();
		if (!user) throw new NoCurrentUserError();
		const id = uuid();
		const defaultSettings = { language: {
			default: user.language,
			supported: [user.language]
		} };
		const projectFile = {
			...props,
			objectType: "project",
			id,
			description: props.description || "",
			settings: Object.assign({}, defaultSettings, props.settings),
			created: datetime(),
			updated: null,
			coreVersion: this.coreVersion,
			status: "todo",
			version: "0.0.1"
		};
		const projectPath = pathTo.project(id);
		await Fs.ensureDir(projectPath);
		try {
			await this.createFolderStructure(projectPath);
			await this.createGitignore(projectPath);
			await this.gitService.init(projectPath, { initialBranch: projectBranchSchema.enum.production });
			await this.jsonFileService.create(projectFile, pathTo.projectFile(id), projectFileSchema);
			await this.gitService.add(projectPath, ["."]);
			await this.gitService.commit(projectPath, {
				method: "create",
				reference: {
					objectType: "project",
					id
				}
			});
			await this.gitService.branches.switch(projectPath, projectBranchSchema.enum.work, { isNew: true });
		} catch (error) {
			// Roll back the partially created Project, then rethrow the cause
			await this.delete({
				id,
				force: true
			});
			throw error;
		}
		return await this.toProject(projectFile);
	}
	/**
	 * Clones a Project by URL
	 *
	 * Clones into a temporary directory first, so the final location can be
	 * derived from the cloned project.json and a collision can be detected.
	 */
	async clone(props) {
		cloneProjectSchema.parse(props);
		const tmpId = uuid();
		const tmpProjectPath = Path.join(pathTo.tmp, tmpId);
		await this.gitService.clone(props.url, tmpProjectPath);
		const projectFile = await this.jsonFileService.read(Path.join(tmpProjectPath, "project.json"), projectFileSchema);
		const projectPath = pathTo.project(projectFile.id);
		if (await Fs.pathExists(projectPath)) throw new Error(`Tried to clone Project "${projectFile.id}" from "${props.url}" - but the Project already exists locally`);
		await Fs.copy(tmpProjectPath, projectPath);
		await Fs.remove(tmpProjectPath);
		return await this.toProject(projectFile);
	}
	/**
	 * Returns a Project by ID
	 *
	 * If a commit hash is provided, the Project is read from history
	 */
	async read(props) {
		readProjectSchema.parse(props);
		if (!props.commitHash) {
			const projectFile = await this.jsonFileService.read(pathTo.projectFile(props.id), projectFileSchema);
			return await this.toProject(projectFile);
		} else {
			// Historic files may use an older schema, so migrate before parsing
			const projectFile = this.migrate(JSON.parse(await this.gitService.getFileContentAtCommit(pathTo.project(props.id), pathTo.projectFile(props.id), props.commitHash)));
			return await this.toProject(projectFile);
		}
	}
	/**
	 * Updates given Project
	 */
	async update(props) {
		updateProjectSchema.parse(props);
		const projectPath = pathTo.project(props.id);
		const filePath = pathTo.projectFile(props.id);
		const prevProjectFile = await this.read(props);
		const projectFile = {
			...prevProjectFile,
			name: props.name || prevProjectFile.name,
			description: props.description || prevProjectFile.description,
			coreVersion: this.coreVersion,
			settings: { language: {
				supported: props.settings?.language.supported || prevProjectFile.settings.language.supported,
				default: props.settings?.language.default || prevProjectFile.settings.language.default
			} },
			updated: datetime()
		};
		await this.jsonFileService.update(projectFile, filePath, projectFileSchema);
		await this.gitService.add(projectPath, [filePath]);
		await this.gitService.commit(projectPath, {
			method: "update",
			reference: {
				objectType: "project",
				id: projectFile.id
			}
		});
		return await this.toProject(projectFile);
	}
	/**
	 * Upgrades given Project to the current version of Core
	 *
	 * Needed when a new Core version is requiring changes to existing files or structure.
	 *
	 * Upgrades run on a temporary branch which is squash-merged back into
	 * "work" on success and deleted again in any case.
	 *
	 * @throws ProjectUpgradeError when the Project is newer than Core, or
	 *   already up to date and "force" is not set
	 */
	async upgrade(props) {
		upgradeProjectSchema.parse(props);
		const projectPath = pathTo.project(props.id);
		const projectFilePath = pathTo.projectFile(props.id);
		if (await this.gitService.branches.current(projectPath) !== projectBranchSchema.enum.work) await this.gitService.branches.switch(projectPath, projectBranchSchema.enum.work);
		const currentProjectFile = outdatedProjectSchema.passthrough().parse(await this.jsonFileService.unsafeRead(projectFilePath));
		if (Semver.gt(currentProjectFile.coreVersion, this.coreVersion)) throw new ProjectUpgradeError(`The Projects Core version "${currentProjectFile.coreVersion}" is higher than the current Core version "${this.coreVersion}".`);
		if (Semver.eq(currentProjectFile.coreVersion, this.coreVersion) && props.force !== true) throw new ProjectUpgradeError(`The Projects Core version "${currentProjectFile.coreVersion}" is already up to date.`);
		const assetReferences = await this.listReferences("asset", props.id);
		const collectionReferences = await this.listReferences("collection", props.id);
		this.logService.info({
			source: "core",
			message: `Attempting to upgrade Project "${props.id}" from Core version ${currentProjectFile.coreVersion} to ${this.coreVersion}`
		});
		const upgradeBranchName = `upgrade/core-${currentProjectFile.coreVersion}-to-${this.coreVersion}`;
		await this.gitService.branches.switch(projectPath, upgradeBranchName, { isNew: true });
		try {
			await Promise.all(assetReferences.map(async (reference) => {
				await this.upgradeObjectFile(props.id, "asset", reference);
			}));
			await Promise.all(collectionReferences.map(async (reference) => {
				await this.upgradeObjectFile(props.id, "collection", reference);
			}));
			// Entries depend on their Collection, so they are upgraded last
			await Promise.all(collectionReferences.map(async (collectionReference) => {
				const entryReferences = await this.listReferences("entry", props.id, collectionReference.id);
				await Promise.all(entryReferences.map(async (reference) => {
					await this.upgradeObjectFile(props.id, "entry", reference, collectionReference.id);
				}));
			}));
			const migratedProjectFile = this.migrate(currentProjectFile);
			await this.update(migratedProjectFile);
			await this.gitService.branches.switch(projectPath, projectBranchSchema.enum.work);
			await this.gitService.merge(projectPath, upgradeBranchName, { squash: true });
			await this.gitService.commit(projectPath, {
				method: "upgrade",
				reference: {
					objectType: "project",
					id: migratedProjectFile.id
				}
			});
			await this.gitService.tags.create({
				path: projectPath,
				message: `Upgraded Project to Core version ${migratedProjectFile.coreVersion}`
			});
			await this.gitService.branches.delete(projectPath, upgradeBranchName, true);
			this.logService.info({
				source: "core",
				message: `Successfully upgraded Project "${props.id}" to Core version "${this.coreVersion}"`,
				meta: {
					previous: currentProjectFile,
					migrated: migratedProjectFile
				}
			});
		} catch (error) {
			// Roll back: return to "work" and drop the temporary upgrade branch
			await this.gitService.branches.switch(projectPath, projectBranchSchema.enum.work);
			await this.gitService.branches.delete(projectPath, upgradeBranchName, true);
			throw error;
		}
	}
	// Branch operations scoped to a Project's repository
	branches = {
		// Lists local (and, after a fetch, remote) branches of the Project
		list: async (props) => {
			listBranchesProjectSchema.parse(props);
			const projectPath = pathTo.project(props.id);
			if (await this.gitService.remotes.hasOrigin(projectPath)) await this.gitService.fetch(projectPath);
			return await this.gitService.branches.list(projectPath);
		},
		// Returns the name of the currently checked out branch
		current: async (props) => {
			currentBranchProjectSchema.parse(props);
			const projectPath = pathTo.project(props.id);
			return await this.gitService.branches.current(projectPath);
		},
		// Switches to (and optionally creates) the given branch
		switch: async (props) => {
			switchBranchProjectSchema.parse(props);
			const projectPath = pathTo.project(props.id);
			return await this.gitService.branches.switch(projectPath, props.branch, props.options);
		}
	};
	/**
	 * Updates the remote origin URL of given Project
	 *
	 * @todo maybe add this logic to the update method
	 */
	async setRemoteOriginUrl(props) {
		setRemoteOriginUrlProjectSchema.parse(props);
		const projectPath = pathTo.project(props.id);
		if (!await this.gitService.remotes.hasOrigin(projectPath)) await this.gitService.remotes.addOrigin(projectPath, props.url);
		else await this.gitService.remotes.setOriginUrl(projectPath, props.url);
	}
	/**
	 * Returns the differences of the given Projects current branch
	 * between the local and remote `origin` (commits ahead & behind)
	 *
	 * Throws an error if the Project does not have a remote origin.
	 *
	 * - `behind` contains a list of commits on the current branch that are available on the remote `origin` but not yet locally
	 * - `ahead` contains a list of commits on the current branch that are available locally but not yet on the remote `origin`
	 */
	async getChanges(props) {
		getChangesProjectSchema.parse(props);
		const projectPath = pathTo.project(props.id);
		if (await this.gitService.remotes.hasOrigin(projectPath) === false) throw new Error(`Project "${props.id}" does not have a remote origin`);
		const currentBranch = await this.gitService.branches.current(projectPath);
		await this.gitService.fetch(projectPath);
		return {
			behind: await this.gitService.log(projectPath, { between: {
				from: currentBranch,
				to: `origin/${currentBranch}`
			} }),
			ahead: await this.gitService.log(projectPath, { between: {
				from: `origin/${currentBranch}`,
				to: currentBranch
			} })
		};
	}
	/**
	 * Pulls remote changes of `origin` down to the local repository
	 * and then pushes local commits to the upstream branch
	 */
	async synchronize(props) {
		synchronizeProjectSchema.parse(props);
		const projectPath = pathTo.project(props.id);
		await this.gitService.pull(projectPath);
		await this.gitService.push(projectPath);
	}
	/**
	 * Deletes given Project
	 *
	 * Deletes the whole Project folder including the history, not only the config file.
	 * Throws in case a Project is only available locally and could be lost forever,
	 * or changes are not pushed to a remote yet.
	 *
	 * @throws RemoteOriginMissingError / SynchronizeLocalChangesError unless "force" is set
	 */
	async delete(props) {
		deleteProjectSchema.parse(props);
		const hasRemoteOrigin = await this.gitService.remotes.hasOrigin(pathTo.project(props.id));
		if (hasRemoteOrigin === false && props.force !== true) throw new RemoteOriginMissingError(props.id);
		if (hasRemoteOrigin === true && props.force !== true) {
			if ((await this.getChanges({ id: props.id })).ahead.length > 0) throw new SynchronizeLocalChangesError(props.id);
		}
		await Fs.remove(pathTo.project(props.id));
	}
	/**
	 * Lists outdated Projects that need to be upgraded
	 */
	async listOutdated() {
		const projectReferences = await this.listReferences(objectTypeSchema.enum.project);
		return (await Promise.all(projectReferences.map(async (reference) => {
			const json = await this.jsonFileService.unsafeRead(pathTo.projectFile(reference.id));
			const projectFile = outdatedProjectSchema.parse(json);
			if (projectFile.coreVersion !== this.coreVersion) return projectFile;
			return null;
		}))).filter(isNotEmpty);
	}
	/**
	 * Lists Projects paginated by offset and limit
	 *
	 * @returns Object with the page of Projects plus total / limit / offset
	 */
	async list(props) {
		if (props) listProjectsSchema.parse(props);
		const offset = props?.offset || 0;
		const limit = props?.limit || 15;
		const projectReferences = await this.listReferences(objectTypeSchema.enum.project);
		// FIX: slice takes an *end index*, not a count — `slice(offset, limit)`
		// returned an empty page for any offset >= limit (e.g. page 2 with
		// offset=15, limit=15). Use offset + limit as the end index.
		const partialProjectReferences = projectReferences.slice(offset, offset + limit);
		const projects = await this.returnResolved(partialProjectReferences.map((reference) => {
			return this.read({ id: reference.id });
		}));
		return {
			total: projectReferences.length,
			limit,
			offset,
			list: projects
		};
	}
	/**
	 * Counts the total number of Projects on disk
	 */
	async count() {
		return (await this.listReferences(objectTypeSchema.enum.project)).length;
	}
	/**
	 * Checks if given object is of type Project
	 */
	isProject(obj) {
		return projectFileSchema.safeParse(obj).success;
	}
	/**
	 * Migrates an potentially outdated Project file to the current schema
	 */
	migrate(potentiallyOutdatedProjectFile) {
		return projectFileSchema.parse(potentiallyOutdatedProjectFile);
	}
	/**
	 * Creates a Project from given ProjectFile
	 *
	 * Enriches the file with the remote origin URL (if any), the history of
	 * the project.json itself and the full repository history.
	 */
	async toProject(projectFile) {
		const projectPath = pathTo.project(projectFile.id);
		let remoteOriginUrl = null;
		if (await this.gitService.remotes.hasOrigin(projectPath)) remoteOriginUrl = await this.gitService.remotes.getOriginUrl(projectPath);
		const fullHistory = await this.gitService.log(pathTo.project(projectFile.id));
		const history = await this.gitService.log(pathTo.project(projectFile.id), { filePath: pathTo.projectFile(projectFile.id) });
		return {
			...projectFile,
			remoteOriginUrl,
			history,
			fullHistory
		};
	}
	/**
	 * Creates the projects folder structure and makes sure to
	 * write empty .gitkeep files inside them to ensure they are
	 * committed
	 */
	async createFolderStructure(path) {
		const folders$1 = Object.values(projectFolderSchema.enum);
		await Promise.all(folders$1.map(async (folder) => {
			await Fs.mkdirp(Path.join(path, folder));
			await Fs.writeFile(Path.join(path, folder, ".gitkeep"), "");
		}));
	}
	/**
	 * Writes the Projects main .gitignore file to disk
	 *
	 * @todo Add general things to ignore
	 * @see https://github.com/github/gitignore/tree/master/Global
	 */
	async createGitignore(path) {
		await Fs.writeFile(Path.join(path, ".gitignore"), [
			"# Ignore all hidden files and folders...",
			".*",
			"# ...but these",
			"!/.gitignore",
			"!/.gitattributes",
			"!/**/.gitkeep",
			"",
			"# elek.io related ignores"
		].join(Os.EOL));
	}
	/**
	 * Upgrades a single object file (asset, collection or entry) by reading it
	 * unvalidated, migrating it to the current schema and writing it back via
	 * the responsible service's update method
	 *
	 * @throws RequiredParameterMissingError when upgrading an entry without a collectionId
	 */
	async upgradeObjectFile(projectId, objectType, reference, collectionId) {
		switch (objectType) {
			case "asset": {
				const assetFilePath = pathTo.assetFile(projectId, reference.id);
				const prevAssetFile = await this.jsonFileService.unsafeRead(assetFilePath);
				const migratedAssetFile = this.assetService.migrate(prevAssetFile);
				await this.assetService.update({
					projectId,
					...migratedAssetFile
				});
				this.logService.info({
					source: "core",
					message: `Upgraded ${objectType} "${assetFilePath}"`,
					meta: {
						previous: prevAssetFile,
						migrated: migratedAssetFile
					}
				});
				return;
			}
			case "collection": {
				const collectionFilePath = pathTo.collectionFile(projectId, reference.id);
				const prevCollectionFile = await this.jsonFileService.unsafeRead(collectionFilePath);
				const migratedCollectionFile = this.collectionService.migrate(prevCollectionFile);
				await this.collectionService.update({
					projectId,
					...migratedCollectionFile
				});
				this.logService.info({
					source: "core",
					message: `Upgraded ${objectType} "${collectionFilePath}"`,
					meta: {
						previous: prevCollectionFile,
						migrated: migratedCollectionFile
					}
				});
				return;
			}
			case "entry": {
				if (!collectionId) throw new RequiredParameterMissingError("collectionId");
				const entryFilePath = pathTo.entryFile(projectId, collectionId, reference.id);
				const prevEntryFile = await this.jsonFileService.unsafeRead(entryFilePath);
				const migratedEntryFile = this.entryService.migrate(prevEntryFile);
				await this.entryService.update({
					projectId,
					collectionId,
					...migratedEntryFile
				});
				this.logService.info({
					source: "core",
					message: `Upgraded ${objectType} "${entryFilePath}"`,
					meta: {
						previous: prevEntryFile,
						migrated: migratedEntryFile
					}
				});
				return;
			}
			default: throw new Error(`Trying to upgrade unsupported object file of type "${objectType}"`);
		}
	}
};
|
|
3820
|
+
|
|
3821
|
+
//#endregion
|
|
3822
|
+
//#region src/service/UserService.ts
|
|
3823
|
+
/**
 * Service to handle the User that is currently working with Core
 */
var UserService = class {
	logService;
	jsonFileService;
	constructor(logService, jsonFileService) {
		this.logService = logService;
		this.jsonFileService = jsonFileService;
	}
	/**
	 * Returns the User currently working with Core
	 *
	 * Returns null (after an info log) when the user file cannot be
	 * read or validated — a missing User is an expected state, not an error.
	 */
	async get() {
		try {
			return await this.jsonFileService.read(pathTo.userFile, userFileSchema);
		} catch {
			// Deliberate best-effort: e.g. first run, no user file written yet
			this.logService.info({
				source: "core",
				message: "No User found"
			});
			return null;
		}
	}
	/**
	 * Sets the User currently working with Core
	 *
	 * By doing so all git operations are done with the signature of this User
	 *
	 * @returns the User object that was written to disk
	 */
	async set(props) {
		setUserSchema.parse(props);
		const userFilePath = pathTo.userFile;
		const userFile = { ...props };
		// NOTE(review): removed a dead, empty conditional here
		// (`if (userFile.userType === UserTypeSchema.enum.cloud) {}`) —
		// it had no body and therefore no effect.
		await this.jsonFileService.update(userFile, userFilePath, userFileSchema);
		this.logService.debug({
			source: "core",
			message: "Updated User"
		});
		return userFile;
	}
};
|
|
3865
|
+
|
|
3866
|
+
//#endregion
|
|
3867
|
+
//#region src/index.node.ts
|
|
3868
|
+
/**
 * elek.io Core
 *
 * Provides access to all services Core is offering
 */
var ElekIoCore = class {
	coreVersion;
	options;
	logService;
	userService;
	gitService;
	jsonFileService;
	assetService;
	projectService;
	collectionService;
	entryService;
	localApi;
	constructor(props) {
		this.coreVersion = package_default.version;
		const parsedProps = constructorElekIoCoreSchema.parse(props);
		// Shallow merge: defaults first, caller-provided options win
		const defaultOptions = {
			log: { level: "info" },
			file: { cache: true }
		};
		this.options = { ...defaultOptions, ...parsedProps };
		// Wire up all services; order matters since later services
		// receive earlier ones as dependencies
		this.logService = new LogService(this.options);
		this.jsonFileService = new JsonFileService(this.options, this.logService);
		this.userService = new UserService(this.logService, this.jsonFileService);
		this.gitService = new GitService(this.options, this.logService, this.userService);
		this.assetService = new AssetService(this.options, this.logService, this.jsonFileService, this.gitService);
		this.collectionService = new CollectionService(this.options, this.logService, this.jsonFileService, this.gitService);
		this.entryService = new EntryService(this.options, this.logService, this.jsonFileService, this.gitService, this.collectionService);
		this.projectService = new ProjectService(this.coreVersion, this.options, this.logService, this.jsonFileService, this.userService, this.gitService, this.assetService, this.collectionService, this.entryService);
		this.localApi = new LocalApi(this.logService, this.projectService, this.collectionService, this.entryService, this.assetService);
		this.logService.info({
			source: "core",
			message: `Initializing elek.io Core ${this.coreVersion}`,
			meta: { options: this.options }
		});
		// Ensure the working directories exist and tmp starts out empty
		Fs.mkdirpSync(pathTo.projects);
		Fs.mkdirpSync(pathTo.tmp);
		Fs.emptyDirSync(pathTo.tmp);
	}
	/**
	 * Exposes the logger
	 */
	get logger() {
		return this.logService;
	}
	/**
	 * Utility / helper functions
	 */
	get util() {
		return node_exports;
	}
	/**
	 * Exposes git functions
	 */
	get git() {
		return this.gitService;
	}
	/**
	 * Getter and setter methods for the User currently working with Core
	 */
	get user() {
		return this.userService;
	}
	/**
	 * CRUD methods to work with Projects
	 */
	get projects() {
		return this.projectService;
	}
	/**
	 * CRUD methods to work with Assets
	 */
	get assets() {
		return this.assetService;
	}
	/**
	 * CRUD methods to work with Collections
	 */
	get collections() {
		return this.collectionService;
	}
	/**
	 * CRUD methods to work with Entries
	 */
	get entries() {
		return this.entryService;
	}
	/**
	 * Allows starting and stopping a REST API
	 * to allow developers to read local Project data
	 */
	get api() {
		return this.localApi;
	}
};
|
|
3966
|
+
|
|
3967
|
+
//#endregion
|
|
3968
|
+
//#region src/cli/util.ts
|
|
3969
|
+
const core = new ElekIoCore({ log: { level: "info" } });
/**
 * Watches all local Projects for file changes, skipping git's
 * internal ".git" directories and the initial add events.
 */
function watchProjects() {
	const watcherOptions = {
		ignoreInitial: true,
		ignored: (path) => path.includes("/.git/")
	};
	return chokidar.watch(core.util.pathTo.projects, watcherOptions);
}
|
|
3976
|
+
|
|
3977
|
+
//#endregion
|
|
3978
|
+
//#region src/index.cli.ts
|
|
3979
|
+
// CLI entry point: defines the "elek" program and its subcommands,
// then parses the process arguments (top-level await).
const program = new Command();
program.name("elek").description("CLI for elek.io").version(package_default.version);
// "generate:client": generates a JS/TS API Client into [outDir];
// arguments are validated through generateApiClientSchema before use.
program.command("generate:client").description("Generates a JS/TS API Client").argument("[outDir]", "The directory to generate the API Client in", "./.elek.io").argument("[language]", "The programming language of the generated API Client. Choose \"ts\" if you bundle it yourself in your TypeScript project, or \"js\" if you want a ready-to-use JavaScript API Client.", "ts").argument("[format]", "The output format of the generated API Client. Choose \"esm\" for ES Modules, or \"cjs\" for CommonJS. This option is only relevant if you choose \"js\" as the language.", "esm").argument("[target]", "The target environment of the generated API Client. Choose this depending on the JavaScript runtime you want to support. This option is only relevant if you choose \"js\" as the language.", "es2020").option("-w, --watch", "Watches for changes in your Projects and regenerates the API Client automatically.").action(async (outDir, language, format$1, target, options) => {
	await generateApiClientAction(generateApiClientSchema.parse({
		outDir,
		language,
		format: format$1,
		target,
		options
	}));
});
// "api:start": starts the local REST API on [port] (default 31310);
// the port string is validated through apiStartSchema.
program.command("api:start").description("Starts the local API").argument("[port]", "The port to run the local API on", "31310").action((port) => {
	startApiAction(apiStartSchema.parse({ port }));
});
// "export": exports locally available Projects as JSON; [projects] is
// either "all" or a comma-separated list of Project IDs.
program.command("export").description("Exports all locally available Projects into a JSON file").argument("[outDir]", "The directory to write the JSON file to", "./.elek.io").argument("[projects]", "One or more Project IDs, separated by commas to export. If not provided, all Projects will be exported.", "all").option("-s, --separate", "Separates the exported Projects into individual files.").option("-w, --watch", "Watches for changes in your Projects and updates the JSON file automatically.").action(async (outDir, projects, options) => {
	await exportAction(exportSchema.parse({
		outDir,
		projects,
		options
	}));
});
await program.parseAsync();
|
|
4001
|
+
|
|
4002
|
+
//#endregion
|
|
4003
|
+
export { };
|
|
4004
|
+
//# sourceMappingURL=index.cli.js.map
|