@elek-io/core 0.15.3 → 0.16.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/astro/index.astro.d.mts +60 -0
- package/dist/astro/index.astro.mjs +3864 -0
- package/dist/astro/index.astro.mjs.map +1 -0
- package/dist/browser/index.browser.d.ts +158 -45
- package/dist/browser/index.browser.js +1 -1
- package/dist/browser/index.browser.js.map +1 -1
- package/dist/cli/{index.cli.js → index.cli.mjs} +292 -148
- package/dist/node/chunk-DQk6qfdC.mjs +18 -0
- package/dist/node/{index.node.d.ts → index.node.d.mts} +203 -90
- package/dist/node/{index.node.js → index.node.mjs} +122 -98
- package/dist/node/index.node.mjs.map +1 -0
- package/package.json +44 -30
- package/dist/node/chunk-Bp6m_JJh.js +0 -13
- package/dist/node/index.node.js.map +0 -1
|
@@ -0,0 +1,3864 @@
|
|
|
1
|
+
import Path from "node:path";
|
|
2
|
+
import Fs from "fs-extra";
|
|
3
|
+
import { serve } from "@hono/node-server";
|
|
4
|
+
import { OpenAPIHono, createRoute, z } from "@hono/zod-openapi";
|
|
5
|
+
import { requestId } from "hono/request-id";
|
|
6
|
+
import { createMiddleware } from "hono/factory";
|
|
7
|
+
import { cors } from "hono/cors";
|
|
8
|
+
import { trimTrailingSlash } from "hono/trailing-slash";
|
|
9
|
+
import { z as z$1 } from "zod";
|
|
10
|
+
import { Scalar } from "@scalar/hono-api-reference";
|
|
11
|
+
import Os from "node:os";
|
|
12
|
+
import { execFile } from "node:child_process";
|
|
13
|
+
import mime from "mime";
|
|
14
|
+
import slugify from "@sindresorhus/slugify";
|
|
15
|
+
import { v4 } from "uuid";
|
|
16
|
+
import { exec } from "dugite";
|
|
17
|
+
import PQueue from "p-queue";
|
|
18
|
+
import { createLogger, format, transports } from "winston";
|
|
19
|
+
import DailyRotateFile from "winston-daily-rotate-file";
|
|
20
|
+
import Semver from "semver";
|
|
21
|
+
|
|
22
|
+
//#region \0rolldown/runtime.js
|
|
23
|
+
var __defProp = Object.defineProperty;
/**
 * Bundler runtime helper: builds a namespace-like object from a map of
 * getter functions. Every property of `all` is installed on the result as a
 * live, enumerable accessor. Unless `no_symbols` is truthy, the result is
 * additionally tagged so it stringifies as "[object Module]".
 */
var __exportAll = (all, no_symbols) => {
	const target = {};
	for (const key in all) {
		__defProp(target, key, {
			get: all[key],
			enumerable: true
		});
	}
	if (!no_symbols) {
		__defProp(target, Symbol.toStringTag, { value: "Module" });
	}
	return target;
};
|
|
37
|
+
|
|
38
|
+
//#endregion
|
|
39
|
+
//#region package.json
|
|
40
|
+
/**
 * Inlined copy of this package's package.json (embedded by the bundler).
 * NOTE(review): presumably used so the runtime can read its own metadata
 * (e.g. `version`) without filesystem access — confirm against usages.
 */
var package_default = {
	name: "@elek-io/core",
	version: "0.16.1",
	description: "Handles core functionality of elek.io Projects like file IO and version control.",
	homepage: "https://elek.io",
	repository: "https://github.com/elek-io/core",
	bugs: { "url": "https://github.com/elek-io/core/issues" },
	type: "module",
	bin: { "elek": "./dist/cli/index.cli.mjs" },
	files: [
		"dist/node",
		"dist/browser",
		"dist/astro"
	],
	exports: {
		".": {
			"node": { "import": {
				"types": "./dist/node/index.node.d.mts",
				"default": "./dist/node/index.node.mjs"
			} },
			"import": {
				"types": "./dist/browser/index.browser.d.ts",
				"default": "./dist/browser/index.browser.js"
			}
		},
		"./astro": { "import": {
			"types": "./dist/astro/index.astro.d.mts",
			"default": "./dist/astro/index.astro.mjs"
		} }
	},
	pnpm: { "overrides": {} },
	scripts: {
		"lint": "eslint",
		"check-types": "tsc --noEmit",
		"check-format": "prettier --check . || exit 0",
		"format": "prettier --write .",
		"dev": "vitest",
		"test": "vitest run",
		"coverage": "vitest run --coverage",
		"build": "tsdown",
		"release": "changeset publish"
	},
	dependencies: {
		"@commander-js/extra-typings": "14.0.0",
		"@hono/node-server": "1.19.11",
		"@hono/zod-openapi": "1.2.2",
		"@scalar/hono-api-reference": "0.10.0",
		"@sindresorhus/slugify": "3.0.0",
		"chokidar": "5.0.0",
		"code-block-writer": "13.0.3",
		"commander": "14.0.3",
		"fs-extra": "11.3.4",
		"hono": "4.12.5",
		"mime": "4.1.0",
		"p-queue": "9.1.0",
		"semver": "7.7.4",
		"tsdown": "0.21.0",
		"uuid": "13.0.0",
		"winston": "3.19.0",
		"winston-daily-rotate-file": "5.0.0",
		"zod": "4.3.6"
	},
	devDependencies: {
		"@changesets/cli": "2.30.0",
		"@eslint/js": "10.0.1",
		"@faker-js/faker": "10.3.0",
		"@tsconfig/node24": "24.0.4",
		"@tsconfig/strictest": "2.0.8",
		"@types/fs-extra": "11.0.4",
		"@types/node": "24.12.0",
		"@types/semver": "7.7.1",
		"@vitest/coverage-v8": "4.0.18",
		"astro": "5.18.0",
		"eslint": "10.0.3",
		"eslint-config-prettier": "10.1.8",
		"globals": "17.4.0",
		"jiti": "2.6.1",
		"prettier": "3.8.1",
		"typescript": "5.9.3",
		"typescript-eslint": "8.56.1",
		"vitest": "4.0.18"
	},
	// astro is an optional peer (see peerDependenciesMeta); dugite is required
	peerDependencies: {
		"astro": ">=5.0.0",
		"dugite": "3.2.0"
	},
	peerDependenciesMeta: { "astro": { "optional": true } }
};
|
|
128
|
+
|
|
129
|
+
//#endregion
|
|
130
|
+
//#region src/api/middleware/requestResponseLogger.ts
|
|
131
|
+
/**
 * Middleware that logs the details of each request and response
 *
 * Writes an info log when a request arrives, then — after the downstream
 * handlers ran — a result log containing the status code and the duration
 * in milliseconds. The result is logged as info (2xx), warn (3xx) or
 * error (4xx/5xx); other status classes (e.g. 1xx) produce no result log.
 *
 * Fix: the request log message previously read "Recieved" (typo).
 */
const requestResponseLogger = createMiddleware(async (c, next) => {
	const { method, url } = c.req;
	const requestId = c.get("requestId");
	c.var.logService.info({
		source: "core",
		message: `Received API request "${method} ${url}" with requestId ${requestId}`
	});
	const start = Date.now();
	await next();
	const durationMs = Date.now() - start;
	const statusCode = c.res.status.toString();
	const resultLog = {
		source: "core",
		message: `Response for API request "${method} ${url}" with requestId ${requestId} and status code ${statusCode} in ${durationMs}ms`
	};
	if (statusCode.startsWith("2")) c.var.logService.info(resultLog);
	else if (statusCode.startsWith("3")) c.var.logService.warn(resultLog);
	else if (statusCode.startsWith("4") || statusCode.startsWith("5")) c.var.logService.error(resultLog);
});
|
|
153
|
+
|
|
154
|
+
//#endregion
|
|
155
|
+
//#region src/api/lib/util.ts
|
|
156
|
+
/**
 * Creates a new OpenAPIHono router with default settings
 *
 * The default hook turns a failed request validation into a 422 JSON
 * response carrying the zod error name and issues; successful validation
 * results are passed through unchanged.
 */
function createRouter() {
	return new OpenAPIHono({
		defaultHook: (result, c) => {
			if (result.success) {
				return result;
			}
			const body = {
				success: result.success,
				error: {
					name: result.error.name,
					issues: result.error.issues
				}
			};
			return c.json(body, 422);
		}
	});
}
|
|
171
|
+
/**
 * Creates a new OpenAPIHono instance, injects services into context and adds error handling
 */
function createApi(logService, projectService, collectionService, entryService, assetService) {
	const api = createRouter();
	// Makes the given services available to every route handler via c.var.*
	const injectServices = createMiddleware((c, next) => {
		c.set("logService", logService);
		c.set("projectService", projectService);
		c.set("collectionService", collectionService);
		c.set("entryService", entryService);
		c.set("assetService", assetService);
		return next();
	});
	api
		.use(requestId())
		.use(trimTrailingSlash())
		.use(cors({ origin: ["http://localhost"] }))
		.use(injectServices)
		.use(requestResponseLogger);
	// Consistent JSON body for unknown routes
	api.notFound((c) => c.json({ message: `Not Found - ${c.req.path}` }, 404));
	// Serialize uncaught errors; keep an explicit HTTP status carried by the
	// error, otherwise fall back to 500
	api.onError((err, c) => {
		const currentStatus = "status" in err ? err.status : c.newResponse(null).status;
		const statusCode = currentStatus !== 200 ? currentStatus : 500;
		return c.json({
			message: err.message,
			stack: err.stack
		}, statusCode);
	});
	return api;
}
|
|
197
|
+
|
|
198
|
+
//#endregion
|
|
199
|
+
//#region src/schema/baseSchema.ts
|
|
200
|
+
/**
 * All currently supported, BCP 47 compliant language tags
 *
 * The support depends on the tools and libraries we use.
 * We can't support a given language, if there is no support
 * for it from used third parties. Currently, to check if a language
 * tag can be added to this list, it needs to be supported by:
 * - DeepL translation API
 *
 * @see https://www.deepl.com/docs-api/other-functions/listing-supported-languages/
 */
const supportedLanguageSchema = z.enum([
	"bg",
	"cs",
	"da",
	"de",
	"el",
	"en",
	"es",
	"et",
	"fi",
	"fr",
	"hu",
	"it",
	"ja",
	"lt",
	"lv",
	"nl",
	"pl",
	"pt",
	"ro",
	"ru",
	"sk",
	"sl",
	"sv",
	"zh"
]);
// Supported icon identifiers
// NOTE(review): "foobar" looks like a placeholder value — confirm
const supportedIconSchema = z.enum([
	"home",
	"plus",
	"foobar"
]);
// Every type of elek.io object that can be referenced
const objectTypeSchema = z.enum([
	"project",
	"asset",
	"collection",
	"entry",
	"value",
	"sharedValue"
]);
// Log severity levels, ordered from most to least severe
const logLevelSchema = z.enum([
	"error",
	"warn",
	"info",
	"debug"
]);
// Version string; not structurally validated here (plain string)
const versionSchema = z.string();
// UUID string
const uuidSchema = z.uuid();
/**
 * A record that can be used to translate a string value into all supported languages
 */
const translatableStringSchema = z.partialRecord(supportedLanguageSchema, z.string().trim().min(1));
/**
 * A record that can be used to translate a number value into all supported languages
 */
const translatableNumberSchema = z.partialRecord(supportedLanguageSchema, z.number());
/**
 * A record that can be used to translate a boolean value into all supported languages
 */
const translatableBooleanSchema = z.partialRecord(supportedLanguageSchema, z.boolean());
/**
 * Builds a record that maps each supported language to an array of the given schema
 */
function translatableArrayOf(schema) {
	return z.partialRecord(supportedLanguageSchema, z.array(schema));
}
|
|
273
|
+
|
|
274
|
+
//#endregion
|
|
275
|
+
//#region src/schema/fileSchema.ts
|
|
276
|
+
/**
 * A basic file structure every elek.io file on disk has to follow
 */
const baseFileSchema = z.object({
	objectType: objectTypeSchema.readonly(),
	id: uuidSchema.readonly(),
	// ISO 8601 datetime of creation. z.iso.datetime() is the Zod 4 top-level
	// format API (equivalent to the deprecated z.string().datetime()), used
	// for consistency with the rest of this file (z.uuid(), z.email(), z.iso.*)
	created: z.iso.datetime().readonly(),
	// ISO 8601 datetime of the last update, or null if never updated
	updated: z.iso.datetime().nullable()
});
/**
 * Reference to a file by id, with an optional file extension
 */
const fileReferenceSchema = z.object({
	id: uuidSchema,
	extension: z.string().optional()
});
|
|
289
|
+
|
|
290
|
+
//#endregion
|
|
291
|
+
//#region src/schema/gitSchema.ts
|
|
292
|
+
/**
 * Signature git uses to identify users
 */
const gitSignatureSchema = z.object({
	name: z.string(),
	// z.email() is the Zod 4 top-level format API (the z.string().email()
	// method form is deprecated); matches usage elsewhere in this file
	email: z.email()
});
// Structured commit message: which method was applied to which object
const gitMessageSchema = z.object({
	method: z.enum([
		"create",
		"update",
		"delete",
		"upgrade"
	]),
	reference: z.object({
		objectType: objectTypeSchema,
		id: uuidSchema,
		collectionId: uuidSchema.optional()
	})
});
const gitTagSchema = z.object({
	id: uuidSchema,
	message: z.string(),
	author: gitSignatureSchema,
	// z.iso.datetime() replaces the deprecated z.string().datetime()
	datetime: z.iso.datetime()
});
const gitCommitSchema = z.object({
	hash: z.string(),
	message: gitMessageSchema,
	author: gitSignatureSchema,
	datetime: z.iso.datetime(),
	// The tag pointing at this commit, if any
	tag: gitTagSchema.nullable()
});
// Options for the individual git operations
const gitInitOptionsSchema = z.object({ initialBranch: z.string() });
const gitCloneOptionsSchema = z.object({
	depth: z.number(),
	singleBranch: z.boolean(),
	branch: z.string(),
	bare: z.boolean()
});
const gitMergeOptionsSchema = z.object({ squash: z.boolean() });
const gitSwitchOptionsSchema = z.object({ isNew: z.boolean().optional() });
const gitLogOptionsSchema = z.object({
	limit: z.number().optional(),
	between: z.object({
		from: z.string(),
		to: z.string().optional()
	}),
	filePath: z.string().optional()
});
// CRUD-style argument schemas for working with git tags
const createGitTagSchema = gitTagSchema.pick({ message: true }).extend({
	path: z.string(),
	hash: z.string().optional()
});
const readGitTagSchema = z.object({
	path: z.string(),
	id: uuidSchema.readonly()
});
const deleteGitTagSchema = readGitTagSchema.extend({});
const countGitTagsSchema = z.object({ path: z.string() });
|
|
352
|
+
|
|
353
|
+
//#endregion
|
|
354
|
+
//#region src/schema/assetSchema.ts
|
|
355
|
+
// On-disk representation of an Asset
const assetFileSchema = baseFileSchema.extend({
	objectType: z.literal(objectTypeSchema.enum.asset).readonly(),
	name: z.string(),
	description: z.string(),
	extension: z.string().readonly(),
	mimeType: z.string().readonly(),
	size: z.number().readonly()
});
// Asset as exposed by the API: file data plus resolved path and git history
const assetSchema = assetFileSchema.extend({
	absolutePath: z.string().readonly(),
	history: z.array(gitCommitSchema)
}).openapi("Asset");
const assetExportSchema = assetSchema.extend({});
// Argument schemas for the Asset CRUD operations
const createAssetSchema = assetFileSchema.pick({
	name: true,
	description: true
}).extend({
	projectId: uuidSchema.readonly(),
	filePath: z.string().readonly()
});
const readAssetSchema = assetFileSchema.pick({ id: true }).extend({
	projectId: uuidSchema.readonly(),
	// Optionally read the Asset as of a specific commit
	commitHash: z.string().optional().readonly()
});
const saveAssetSchema = assetFileSchema.pick({ id: true }).extend({
	projectId: uuidSchema.readonly(),
	filePath: z.string().readonly(),
	commitHash: z.string().optional().readonly()
});
const updateAssetSchema = assetFileSchema.pick({
	id: true,
	name: true,
	description: true
}).extend({
	projectId: uuidSchema.readonly(),
	// Optionally replace the Asset's file content with the file at this path
	newFilePath: z.string().readonly().optional()
});
const deleteAssetSchema = assetFileSchema.pick({
	id: true,
	extension: true
}).extend({ projectId: uuidSchema.readonly() });
const countAssetsSchema = z.object({ projectId: uuidSchema.readonly() });
|
|
397
|
+
|
|
398
|
+
//#endregion
|
|
399
|
+
//#region src/schema/valueSchema.ts
|
|
400
|
+
// The primitive kind a Value's content can have
const ValueTypeSchema = z.enum([
	"string",
	"number",
	"boolean",
	"reference"
]);
// A reference to another elek.io object (Asset, Collection or Entry) by id
const valueContentReferenceBase = z.object({ id: uuidSchema });
const valueContentReferenceToAssetSchema = valueContentReferenceBase.extend({ objectType: z.literal(objectTypeSchema.enum.asset) });
const valueContentReferenceToCollectionSchema = valueContentReferenceBase.extend({ objectType: z.literal(objectTypeSchema.enum.collection) });
const valueContentReferenceToEntrySchema = valueContentReferenceBase.extend({ objectType: z.literal(objectTypeSchema.enum.entry) });
const valueContentReferenceSchema = z.union([
	valueContentReferenceToAssetSchema,
	valueContentReferenceToCollectionSchema,
	valueContentReferenceToEntrySchema
]);
// A Value whose content is stored inline (not a reference)
const directValueBaseSchema = z.object({
	objectType: z.literal(objectTypeSchema.enum.value).readonly(),
	fieldDefinitionId: uuidSchema.readonly()
});
const directStringValueSchema = directValueBaseSchema.extend({
	valueType: z.literal(ValueTypeSchema.enum.string).readonly(),
	content: translatableStringSchema
});
const directNumberValueSchema = directValueBaseSchema.extend({
	valueType: z.literal(ValueTypeSchema.enum.number).readonly(),
	content: translatableNumberSchema
});
const directBooleanValueSchema = directValueBaseSchema.extend({
	valueType: z.literal(ValueTypeSchema.enum.boolean).readonly(),
	content: translatableBooleanSchema
});
const directValueSchema = z.union([
	directStringValueSchema,
	directNumberValueSchema,
	directBooleanValueSchema
]);
// A Value whose content references other objects, per language
const referencedValueSchema = z.object({
	objectType: z.literal(objectTypeSchema.enum.value).readonly(),
	fieldDefinitionId: uuidSchema.readonly(),
	valueType: z.literal(ValueTypeSchema.enum.reference).readonly(),
	content: translatableArrayOf(valueContentReferenceSchema)
});
// Any Value: direct content or references
const valueSchema = z.union([directValueSchema, referencedValueSchema]);
/**
 * ---
 */
/**
 * @todo maybe we need to validate Values and shared Values
 */
|
|
449
|
+
|
|
450
|
+
//#endregion
|
|
451
|
+
//#region src/schema/entrySchema.ts
|
|
452
|
+
// On-disk representation of an Entry: a list of Values
const entryFileSchema = baseFileSchema.extend({
	objectType: z.literal(objectTypeSchema.enum.entry).readonly(),
	values: z.array(valueSchema)
});
// Entry as exposed by the API: file data plus git history
const entrySchema = entryFileSchema.extend({ history: z.array(gitCommitSchema) }).openapi("Entry");
const entryExportSchema = entrySchema.extend({});
// Argument schemas for the Entry CRUD operations
const createEntrySchema = entryFileSchema.omit({
	id: true,
	objectType: true,
	created: true,
	updated: true
}).extend({
	projectId: uuidSchema.readonly(),
	collectionId: uuidSchema.readonly(),
	values: z.array(valueSchema)
});
const readEntrySchema = z.object({
	id: uuidSchema.readonly(),
	projectId: uuidSchema.readonly(),
	collectionId: uuidSchema.readonly(),
	// Optionally read the Entry as of a specific commit
	commitHash: z.string().optional().readonly()
});
const updateEntrySchema = entryFileSchema.omit({
	objectType: true,
	created: true,
	updated: true
}).extend({
	projectId: uuidSchema.readonly(),
	collectionId: uuidSchema.readonly()
});
const deleteEntrySchema = readEntrySchema.extend({});
const countEntriesSchema = z.object({
	projectId: uuidSchema.readonly(),
	collectionId: uuidSchema.readonly()
});
|
|
487
|
+
|
|
488
|
+
//#endregion
|
|
489
|
+
//#region src/schema/fieldSchema.ts
|
|
490
|
+
// Every input type a Field can be rendered as
const FieldTypeSchema = z.enum([
	"text",
	"textarea",
	"email",
	"url",
	"ipv4",
	"date",
	"time",
	"datetime",
	"telephone",
	"number",
	"range",
	"toggle",
	"asset",
	"entry"
]);
// Column width of the Field's input (12-column grid values)
const FieldWidthSchema = z.enum([
	"12",
	"6",
	"4",
	"3"
]);
// Properties shared by every Field definition
const FieldDefinitionBaseSchema = z.object({
	id: uuidSchema.readonly(),
	label: translatableStringSchema,
	description: translatableStringSchema.nullable(),
	isRequired: z.boolean(),
	isDisabled: z.boolean(),
	isUnique: z.boolean(),
	inputWidth: FieldWidthSchema
});
/**
 * String based Field definitions
 */
const StringFieldDefinitionBaseSchema = FieldDefinitionBaseSchema.extend({
	valueType: z.literal(ValueTypeSchema.enum.string),
	defaultValue: z.string().nullable()
});
// text/textarea additionally constrain string length via min/max
const textFieldDefinitionSchema = StringFieldDefinitionBaseSchema.extend({
	fieldType: z.literal(FieldTypeSchema.enum.text),
	min: z.number().nullable(),
	max: z.number().nullable()
});
const textareaFieldDefinitionSchema = StringFieldDefinitionBaseSchema.extend({
	fieldType: z.literal(FieldTypeSchema.enum.textarea),
	min: z.number().nullable(),
	max: z.number().nullable()
});
// Format-specific fields narrow the default value to the matching format
const emailFieldDefinitionSchema = StringFieldDefinitionBaseSchema.extend({
	fieldType: z.literal(FieldTypeSchema.enum.email),
	defaultValue: z.email().nullable()
});
const urlFieldDefinitionSchema = StringFieldDefinitionBaseSchema.extend({
	fieldType: z.literal(FieldTypeSchema.enum.url),
	defaultValue: z.url().nullable()
});
const ipv4FieldDefinitionSchema = StringFieldDefinitionBaseSchema.extend({
	fieldType: z.literal(FieldTypeSchema.enum.ipv4),
	defaultValue: z.ipv4().nullable()
});
const dateFieldDefinitionSchema = StringFieldDefinitionBaseSchema.extend({
	fieldType: z.literal(FieldTypeSchema.enum.date),
	defaultValue: z.iso.date().nullable()
});
const timeFieldDefinitionSchema = StringFieldDefinitionBaseSchema.extend({
	fieldType: z.literal(FieldTypeSchema.enum.time),
	defaultValue: z.iso.time().nullable()
});
const datetimeFieldDefinitionSchema = StringFieldDefinitionBaseSchema.extend({
	fieldType: z.literal(FieldTypeSchema.enum.datetime),
	defaultValue: z.iso.datetime().nullable()
});
const telephoneFieldDefinitionSchema = StringFieldDefinitionBaseSchema.extend({
	fieldType: z.literal(FieldTypeSchema.enum.telephone),
	defaultValue: z.e164().nullable()
});
const stringFieldDefinitionSchema = z.union([
	textFieldDefinitionSchema,
	textareaFieldDefinitionSchema,
	emailFieldDefinitionSchema,
	urlFieldDefinitionSchema,
	ipv4FieldDefinitionSchema,
	dateFieldDefinitionSchema,
	timeFieldDefinitionSchema,
	datetimeFieldDefinitionSchema,
	telephoneFieldDefinitionSchema
]);
/**
 * Number based Field definitions
 */
const NumberFieldDefinitionBaseSchema = FieldDefinitionBaseSchema.extend({
	valueType: z.literal(ValueTypeSchema.enum.number),
	min: z.number().nullable(),
	max: z.number().nullable(),
	// Number Fields cannot be unique
	isUnique: z.literal(false),
	defaultValue: z.number().nullable()
});
const numberFieldDefinitionSchema = NumberFieldDefinitionBaseSchema.extend({ fieldType: z.literal(FieldTypeSchema.enum.number) });
// A range always requires a value and concrete min/max/default bounds
const rangeFieldDefinitionSchema = NumberFieldDefinitionBaseSchema.extend({
	fieldType: z.literal(FieldTypeSchema.enum.range),
	isRequired: z.literal(true),
	min: z.number(),
	max: z.number(),
	defaultValue: z.number()
});
/**
 * Boolean based Field definitions
 */
const BooleanFieldDefinitionBaseSchema = FieldDefinitionBaseSchema.extend({
	valueType: z.literal(ValueTypeSchema.enum.boolean),
	// A boolean is always either true or false, so it is always required
	isRequired: z.literal(true),
	defaultValue: z.boolean(),
	isUnique: z.literal(false)
});
const toggleFieldDefinitionSchema = BooleanFieldDefinitionBaseSchema.extend({ fieldType: z.literal(FieldTypeSchema.enum.toggle) });
/**
 * Reference based Field definitions
 */
const ReferenceFieldDefinitionBaseSchema = FieldDefinitionBaseSchema.extend({ valueType: z.literal(ValueTypeSchema.enum.reference) });
const assetFieldDefinitionSchema = ReferenceFieldDefinitionBaseSchema.extend({
	fieldType: z.literal(FieldTypeSchema.enum.asset),
	min: z.number().nullable(),
	max: z.number().nullable()
});
const entryFieldDefinitionSchema = ReferenceFieldDefinitionBaseSchema.extend({
	fieldType: z.literal(FieldTypeSchema.enum.entry),
	// Which Collections referenced Entries may come from
	ofCollections: z.array(uuidSchema),
	min: z.number().nullable(),
	max: z.number().nullable()
});
// Any Field definition a Collection can declare
const fieldDefinitionSchema = z.union([
	stringFieldDefinitionSchema,
	numberFieldDefinitionSchema,
	rangeFieldDefinitionSchema,
	toggleFieldDefinitionSchema,
	assetFieldDefinitionSchema,
	entryFieldDefinitionSchema
]);
|
|
628
|
+
|
|
629
|
+
//#endregion
|
|
630
|
+
//#region src/schema/collectionSchema.ts
|
|
631
|
+
// On-disk representation of a Collection
const collectionFileSchema = baseFileSchema.extend({
	objectType: z.literal(objectTypeSchema.enum.collection).readonly(),
	// Translatable singular/plural display names
	name: z.object({
		singular: translatableStringSchema,
		plural: translatableStringSchema
	}),
	slug: z.object({
		singular: z.string(),
		plural: z.string()
	}),
	description: translatableStringSchema,
	icon: supportedIconSchema,
	// Field definitions every Entry of this Collection must satisfy
	fieldDefinitions: z.array(fieldDefinitionSchema)
});
// Collection as exposed by the API: file data plus git history
const collectionSchema = collectionFileSchema.extend({ history: z.array(gitCommitSchema) }).openapi("Collection");
// Export includes all of the Collection's Entries
const collectionExportSchema = collectionSchema.extend({ entries: z.array(entryExportSchema) });
// Argument schemas for the Collection CRUD operations
const createCollectionSchema = collectionFileSchema.omit({
	id: true,
	objectType: true,
	created: true,
	updated: true
}).extend({ projectId: uuidSchema.readonly() });
const readCollectionSchema = z.object({
	id: uuidSchema.readonly(),
	projectId: uuidSchema.readonly(),
	// Optionally read the Collection as of a specific commit
	commitHash: z.string().optional().readonly()
});
const updateCollectionSchema = collectionFileSchema.pick({
	id: true,
	name: true,
	slug: true,
	description: true,
	icon: true,
	fieldDefinitions: true
}).extend({ projectId: uuidSchema.readonly() });
const deleteCollectionSchema = readCollectionSchema.extend({});
const countCollectionsSchema = z.object({ projectId: uuidSchema.readonly() });
|
|
668
|
+
|
|
669
|
+
//#endregion
|
|
670
|
+
//#region src/schema/coreSchema.ts
|
|
671
|
+
/**
 * Options that can be passed to elek.io core
 */
const elekIoCoreOptionsSchema = z.object({
	log: z.object({ level: logLevelSchema }),
	file: z.object({ cache: z.boolean() })
});
// Constructor input: same options with every group optional, and the whole
// options object itself optional
const constructorElekIoCoreSchema = elekIoCoreOptionsSchema.partial({
	log: true,
	file: true
}).optional();
|
|
682
|
+
|
|
683
|
+
//#endregion
|
|
684
|
+
//#region src/schema/projectSchema.ts
|
|
685
|
+
// NOTE(review): "foo"/"bar"/"todo" look like placeholder status values —
// confirm the intended Project statuses
const projectStatusSchema = z.enum([
	"foo",
	"bar",
	"todo"
]);
// Per-Project language configuration
const projectSettingsSchema = z.object({ language: z.object({
	default: supportedLanguageSchema,
	supported: z.array(supportedLanguageSchema)
}) });
// Folder names inside a Project's repository
const projectFolderSchema = z.enum([
	"assets",
	"collections",
	"shared-values",
	"lfs"
]);
const projectBranchSchema = z.enum(["production", "work"]);
// On-disk representation of a Project
const projectFileSchema = baseFileSchema.extend({
	objectType: z.literal(objectTypeSchema.enum.project).readonly(),
	// Version of elek.io core the Project was last written with
	coreVersion: versionSchema,
	name: z.string().trim().min(1),
	description: z.string().trim().min(1),
	version: versionSchema,
	status: projectStatusSchema,
	settings: projectSettingsSchema
});
// Project as exposed by the API: file data plus git metadata
const projectSchema = projectFileSchema.extend({
	remoteOriginUrl: z.string().nullable().openapi({ description: "URL of the remote Git repository" }),
	history: z.array(gitCommitSchema).openapi({ description: "Commit history of this Project" }),
	fullHistory: z.array(gitCommitSchema).openapi({ description: "Full commit history of this Project including all Assets, Collections, Entries and other files" })
}).openapi("Project");
// Loose schema for migrations: only id and coreVersion are validated,
// unknown keys are passed through
const migrateProjectSchema = projectFileSchema.pick({
	id: true,
	coreVersion: true
}).loose();
// Export includes all Assets and Collections (with their Entries)
const projectExportSchema = projectSchema.extend({
	assets: z.array(assetExportSchema),
	collections: z.array(collectionExportSchema)
});
// Argument schemas for the Project operations
const createProjectSchema = projectSchema.pick({
	name: true,
	description: true,
	settings: true
});
const readProjectSchema = z.object({
	id: uuidSchema.readonly(),
	// Optionally read the Project as of a specific commit
	commitHash: z.string().optional().readonly()
});
const updateProjectSchema = projectSchema.pick({
	id: true,
	name: true,
	description: true,
	settings: true
});
const upgradeProjectSchema = z.object({
	id: uuidSchema.readonly(),
	force: z.boolean().optional()
});
const deleteProjectSchema = readProjectSchema.extend({ force: z.boolean().optional() });
// A single upgrade step: target version and the async function that applies it
const projectUpgradeSchema = z.object({
	to: versionSchema.readonly(),
	run: z.function({
		input: [projectFileSchema],
		output: z.promise(z.void())
	})
});
const cloneProjectSchema = z.object({ url: z.string() });
const listBranchesProjectSchema = z.object({ id: uuidSchema.readonly() });
const currentBranchProjectSchema = z.object({ id: uuidSchema.readonly() });
const switchBranchProjectSchema = z.object({
	id: uuidSchema.readonly(),
	branch: z.string(),
	options: gitSwitchOptionsSchema.optional()
});
const getRemoteOriginUrlProjectSchema = z.object({ id: uuidSchema.readonly() });
const setRemoteOriginUrlProjectSchema = z.object({
	id: uuidSchema.readonly(),
	url: z.string()
});
const getChangesProjectSchema = z.object({ id: uuidSchema.readonly() });
const synchronizeProjectSchema = z.object({ id: uuidSchema.readonly() });
const searchProjectSchema = z.object({
	id: uuidSchema.readonly(),
	query: z.string(),
	language: supportedLanguageSchema,
	// Optionally restrict search to certain object types
	type: z.array(objectTypeSchema).optional()
});
|
|
771
|
+
|
|
772
|
+
//#endregion
|
|
773
|
+
//#region src/schema/schemaFromFieldDefinition.ts
|
|
774
|
+
/**
|
|
775
|
+
* Dynamic zod schema generation
|
|
776
|
+
*
|
|
777
|
+
* Altough everything is already strictly typed, a type of string might not be an email or text of a certain length.
|
|
778
|
+
* To validate this, we need to generate zod schemas based on Field definitions the user created.
|
|
779
|
+
*/
|
|
780
|
+
/**
|
|
781
|
+
* Boolean Values are always either true or false, so we don't need the Field definition here
|
|
782
|
+
*/
|
|
783
|
+
/**
 * Builds the content schema for boolean Values.
 *
 * Boolean Values are always exactly `true` or `false`, so no information from
 * the Field definition is needed and the function takes no parameters.
 *
 * @returns A zod boolean schema
 */
function getBooleanValueContentSchemaFromFieldDefinition() {
	const booleanContentSchema = z.boolean();
	return booleanContentSchema;
}
|
|
786
|
+
/**
|
|
787
|
+
* Number Values can have min and max values and can be required or not
|
|
788
|
+
*/
|
|
789
|
+
/**
 * Builds the content schema for number Values.
 *
 * Number Values can be constrained by `min` and `max` and can be optional
 * (`isRequired === false` makes the schema nullable).
 *
 * Note: the previous truthiness checks (`if (fieldDefinition.min)`) silently
 * ignored a configured `min: 0` or `max: 0`; nullish checks apply them correctly.
 *
 * @param fieldDefinition - The user-created Field definition to derive constraints from
 * @returns A zod number schema honoring the Field definition's constraints
 */
function getNumberValueContentSchemaFromFieldDefinition(fieldDefinition) {
	let schema = z.number();
	if (fieldDefinition.min != null) schema = schema.min(fieldDefinition.min);
	if (fieldDefinition.max != null) schema = schema.max(fieldDefinition.max);
	// Only an explicit `isRequired: false` relaxes the schema to allow null
	if (fieldDefinition.isRequired === false) return schema.nullable();
	return schema;
}
|
|
796
|
+
/**
|
|
797
|
+
* String Values can have different formats (email, url, ipv4, date, time, ...)
|
|
798
|
+
* and can have min and max length and can be required or not
|
|
799
|
+
*/
|
|
800
|
+
/**
 * Builds the content schema for string Values.
 *
 * String Values come in several formats (email, url, ipv4, date, time,
 * datetime, telephone, text, textarea), can be constrained by min/max length
 * and can be optional (`isRequired === false` makes the schema nullable).
 *
 * @param fieldDefinition - The user-created Field definition to derive the format and constraints from
 * @returns A zod string schema honoring the Field definition
 * @throws {Error} If the Field definition's fieldType has no string schema mapping
 */
function getStringValueContentSchemaFromFieldDefinition(fieldDefinition) {
	let schema = null;
	switch (fieldDefinition.fieldType) {
		case FieldTypeSchema.enum.email:
			schema = z.email();
			break;
		case FieldTypeSchema.enum.url:
			schema = z.url();
			break;
		case FieldTypeSchema.enum.ipv4:
			schema = z.ipv4();
			break;
		case FieldTypeSchema.enum.date:
			schema = z.iso.date();
			break;
		case FieldTypeSchema.enum.time:
			schema = z.iso.time();
			break;
		case FieldTypeSchema.enum.datetime:
			schema = z.iso.datetime();
			break;
		case FieldTypeSchema.enum.telephone:
			schema = z.e164();
			break;
		case FieldTypeSchema.enum.text:
		case FieldTypeSchema.enum.textarea:
			schema = z.string().trim();
			break;
		default:
			// Previously `schema` stayed null here and the `.min()` calls below
			// crashed with an opaque TypeError; fail loudly and descriptively instead
			// (mirrors getValueSchemaFromFieldDefinition's unsupported-type handling).
			throw new Error(`Error generating schema for unsupported FieldType "${fieldDefinition.fieldType}"`);
	}
	// Nullish checks instead of truthiness so a configured 0 is not silently dropped
	if ("min" in fieldDefinition && fieldDefinition.min != null) schema = schema.min(fieldDefinition.min);
	if ("max" in fieldDefinition && fieldDefinition.max != null) schema = schema.max(fieldDefinition.max);
	if (fieldDefinition.isRequired === false) return schema.nullable();
	// Required strings must be non-empty
	return schema.min(1);
}
|
|
834
|
+
/**
|
|
835
|
+
* Reference Values can reference either Assets or Entries (or Shared Values in the future)
|
|
836
|
+
* and can have min and max number of references and can be required or not
|
|
837
|
+
*/
|
|
838
|
+
/**
 * Builds the content schema for reference Values.
 *
 * Reference Values point to either Assets or Entries (Shared Values possibly
 * in the future), can be constrained by a min and max number of references,
 * and `isRequired` enforces at least one reference.
 *
 * @param fieldDefinition - The user-created Field definition to derive the reference target and constraints from
 * @returns A zod array schema of reference objects honoring the Field definition
 * @throws {Error} If the Field definition's fieldType is not a known reference type
 */
function getReferenceValueContentSchemaFromFieldDefinition(fieldDefinition) {
	let schema;
	switch (fieldDefinition.fieldType) {
		case FieldTypeSchema.enum.asset:
			schema = z.array(valueContentReferenceToAssetSchema);
			break;
		case FieldTypeSchema.enum.entry:
			schema = z.array(valueContentReferenceToEntrySchema);
			break;
		default:
			// Previously `schema` stayed undefined here and `.min()` below crashed
			// with an opaque TypeError; fail loudly and descriptively instead.
			throw new Error(`Error generating schema for unsupported FieldType "${fieldDefinition.fieldType}"`);
	}
	if (fieldDefinition.isRequired) schema = schema.min(1);
	// Nullish checks instead of truthiness so a configured 0 is not silently dropped
	if (fieldDefinition.min != null) schema = schema.min(fieldDefinition.min);
	if (fieldDefinition.max != null) schema = schema.max(fieldDefinition.max);
	return schema;
}
|
|
853
|
+
/**
 * Wraps the string content schema in a per-language partial record,
 * so each supported language may (but does not have to) carry content.
 *
 * @param fieldDefinition - The Field definition forwarded to the string content schema builder
 * @returns A zod partial record keyed by supported language
 */
function getTranslatableStringValueContentSchemaFromFieldDefinition(fieldDefinition) {
	const contentSchemaPerLanguage = getStringValueContentSchemaFromFieldDefinition(fieldDefinition);
	return z.partialRecord(supportedLanguageSchema, contentSchemaPerLanguage);
}
|
|
856
|
+
/**
 * Wraps the number content schema in a per-language partial record,
 * so each supported language may (but does not have to) carry content.
 *
 * @param fieldDefinition - The Field definition forwarded to the number content schema builder
 * @returns A zod partial record keyed by supported language
 */
function getTranslatableNumberValueContentSchemaFromFieldDefinition(fieldDefinition) {
	const contentSchemaPerLanguage = getNumberValueContentSchemaFromFieldDefinition(fieldDefinition);
	return z.partialRecord(supportedLanguageSchema, contentSchemaPerLanguage);
}
|
|
859
|
+
/**
 * Wraps the boolean content schema in a per-language partial record,
 * so each supported language may (but does not have to) carry content.
 *
 * @returns A zod partial record keyed by supported language
 */
function getTranslatableBooleanValueContentSchemaFromFieldDefinition() {
	const contentSchemaPerLanguage = getBooleanValueContentSchemaFromFieldDefinition();
	return z.partialRecord(supportedLanguageSchema, contentSchemaPerLanguage);
}
|
|
862
|
+
/**
 * Wraps the reference content schema in a per-language partial record,
 * so each supported language may (but does not have to) carry content.
 *
 * @param fieldDefinition - The Field definition forwarded to the reference content schema builder
 * @returns A zod partial record keyed by supported language
 */
function getTranslatableReferenceValueContentSchemaFromFieldDefinition(fieldDefinition) {
	const contentSchemaPerLanguage = getReferenceValueContentSchemaFromFieldDefinition(fieldDefinition);
	return z.partialRecord(supportedLanguageSchema, contentSchemaPerLanguage);
}
|
|
865
|
+
/**
|
|
866
|
+
* Generates a zod schema to check a Value based on given Field definition
|
|
867
|
+
*/
|
|
868
|
+
/**
 * Generates a zod schema to check a Value based on the given Field definition.
 *
 * Dispatches on the definition's valueType to the matching base Value schema
 * and extends it with the corresponding translatable content schema.
 *
 * @param fieldDefinition - The user-created Field definition to generate the schema for
 * @returns The extended Value schema for the definition's valueType
 * @throws {Error} If the valueType is not one of boolean, number, string or reference
 */
function getValueSchemaFromFieldDefinition(fieldDefinition) {
	const { valueType } = fieldDefinition;
	if (valueType === ValueTypeSchema.enum.boolean) {
		return directBooleanValueSchema.extend({ content: getTranslatableBooleanValueContentSchemaFromFieldDefinition() });
	}
	if (valueType === ValueTypeSchema.enum.number) {
		return directNumberValueSchema.extend({ content: getTranslatableNumberValueContentSchemaFromFieldDefinition(fieldDefinition) });
	}
	if (valueType === ValueTypeSchema.enum.string) {
		return directStringValueSchema.extend({ content: getTranslatableStringValueContentSchemaFromFieldDefinition(fieldDefinition) });
	}
	if (valueType === ValueTypeSchema.enum.reference) {
		return referencedValueSchema.extend({ content: getTranslatableReferenceValueContentSchemaFromFieldDefinition(fieldDefinition) });
	}
	throw new Error(`Error generating schema for unsupported ValueType "${fieldDefinition.valueType}"`);
}
|
|
877
|
+
/**
|
|
878
|
+
* Generates a schema for creating a new Entry based on the given Field definitions and Values
|
|
879
|
+
*/
|
|
880
|
+
/**
 * Generates a schema for creating a new Entry based on the given Field
 * definitions: the base createEntrySchema shape plus a positional tuple of
 * Value schemas, one per Field definition (order matters).
 *
 * @param fieldDefinitions - The Field definitions of the target Collection
 * @returns A zod object schema validating a create-Entry payload
 */
function getCreateEntrySchemaFromFieldDefinitions(fieldDefinitions) {
	const valueSchemas = [];
	for (const fieldDefinition of fieldDefinitions) {
		valueSchemas.push(getValueSchemaFromFieldDefinition(fieldDefinition));
	}
	return z.object({
		...createEntrySchema.shape,
		values: z.tuple(valueSchemas)
	});
}
|
|
889
|
+
/**
|
|
890
|
+
* Generates a schema for updating an existing Entry based on the given Field definitions and Values
|
|
891
|
+
*/
|
|
892
|
+
/**
 * Generates a schema for updating an existing Entry based on the given Field
 * definitions: the base updateEntrySchema shape plus a positional tuple of
 * Value schemas, one per Field definition (order matters).
 *
 * @param fieldDefinitions - The Field definitions of the target Collection
 * @returns A zod object schema validating an update-Entry payload
 */
function getUpdateEntrySchemaFromFieldDefinitions(fieldDefinitions) {
	const valueSchemas = [];
	for (const fieldDefinition of fieldDefinitions) {
		valueSchemas.push(getValueSchemaFromFieldDefinition(fieldDefinition));
	}
	return z.object({
		...updateEntrySchema.shape,
		values: z.tuple(valueSchemas)
	});
}
|
|
901
|
+
|
|
902
|
+
//#endregion
|
|
903
|
+
//#region src/schema/serviceSchema.ts
|
|
904
|
+
const serviceTypeSchema = z.enum([
|
|
905
|
+
"Git",
|
|
906
|
+
"GitTag",
|
|
907
|
+
"User",
|
|
908
|
+
"Project",
|
|
909
|
+
"Asset",
|
|
910
|
+
"JsonFile",
|
|
911
|
+
"Search",
|
|
912
|
+
"Collection",
|
|
913
|
+
"Entry",
|
|
914
|
+
"Value"
|
|
915
|
+
]);
|
|
916
|
+
/**
 * Wraps an item schema in the standard paginated-list envelope used by the
 * local API: total item count, applied limit/offset and the page of items.
 *
 * @param schema - The zod schema of a single list item
 * @returns A zod object schema describing a paginated list of `schema` items
 */
function paginatedListOf(schema) {
	const paginationShape = {
		total: z.number(),
		limit: z.number(),
		offset: z.number()
	};
	return z.object({
		...paginationShape,
		list: z.array(schema)
	});
}
|
|
924
|
+
const listSchema = z.object({
|
|
925
|
+
projectId: uuidSchema,
|
|
926
|
+
limit: z.number().optional(),
|
|
927
|
+
offset: z.number().optional()
|
|
928
|
+
});
|
|
929
|
+
const listCollectionsSchema = listSchema;
|
|
930
|
+
const listEntriesSchema = listSchema.extend({ collectionId: uuidSchema });
|
|
931
|
+
const listAssetsSchema = listSchema;
|
|
932
|
+
const listProjectsSchema = listSchema.omit({ projectId: true });
|
|
933
|
+
const listGitTagsSchema = z.object({ path: z.string() });
|
|
934
|
+
|
|
935
|
+
//#endregion
|
|
936
|
+
//#region src/schema/userSchema.ts
|
|
937
|
+
const UserTypeSchema = z.enum(["local", "cloud"]);
|
|
938
|
+
const baseUserSchema = gitSignatureSchema.extend({
|
|
939
|
+
userType: UserTypeSchema,
|
|
940
|
+
language: supportedLanguageSchema,
|
|
941
|
+
localApi: z.object({
|
|
942
|
+
isEnabled: z.boolean(),
|
|
943
|
+
port: z.number()
|
|
944
|
+
})
|
|
945
|
+
});
|
|
946
|
+
const localUserSchema = baseUserSchema.extend({ userType: z.literal(UserTypeSchema.enum.local) });
|
|
947
|
+
const cloudUserSchema = baseUserSchema.extend({
|
|
948
|
+
userType: z.literal(UserTypeSchema.enum.cloud),
|
|
949
|
+
id: uuidSchema
|
|
950
|
+
});
|
|
951
|
+
const userFileSchema = z.union([localUserSchema, cloudUserSchema]);
|
|
952
|
+
const userSchema = userFileSchema;
|
|
953
|
+
const setUserSchema = userSchema;
|
|
954
|
+
|
|
955
|
+
//#endregion
|
|
956
|
+
//#region src/schema/cliSchema.ts
|
|
957
|
+
const outDirSchema = z$1.string().default("./.elek.io");
|
|
958
|
+
const languageSchema = z$1.enum(["ts", "js"]).default("ts");
|
|
959
|
+
const formatSchema = z$1.enum(["esm", "cjs"]).default("esm");
|
|
960
|
+
const targetSchema = z$1.enum([
|
|
961
|
+
"es3",
|
|
962
|
+
"es5",
|
|
963
|
+
"es6",
|
|
964
|
+
"es2015",
|
|
965
|
+
"es2016",
|
|
966
|
+
"es2017",
|
|
967
|
+
"es2018",
|
|
968
|
+
"es2019",
|
|
969
|
+
"es2020",
|
|
970
|
+
"es2021",
|
|
971
|
+
"es2022",
|
|
972
|
+
"es2023",
|
|
973
|
+
"es2024",
|
|
974
|
+
"esnext"
|
|
975
|
+
]).default("es2020");
|
|
976
|
+
const projectsSchema = z$1.string().default("all").transform((value) => {
|
|
977
|
+
if (value === "all") return "all";
|
|
978
|
+
return value.split(",").map((v) => uuidSchema.parse(v.trim()));
|
|
979
|
+
});
|
|
980
|
+
const generateApiClientOptionsSchema = z$1.object({ watch: z$1.boolean().default(false) });
|
|
981
|
+
const exportProjectsOptionsSchema = generateApiClientOptionsSchema.extend({ watch: z$1.boolean().default(false) });
|
|
982
|
+
const generateApiClientSchema = z$1.object({
|
|
983
|
+
outDir: outDirSchema,
|
|
984
|
+
language: languageSchema,
|
|
985
|
+
format: formatSchema,
|
|
986
|
+
target: targetSchema,
|
|
987
|
+
options: generateApiClientOptionsSchema
|
|
988
|
+
});
|
|
989
|
+
/**
 * CLI port option: a string (default "31310") transformed into a number.
 *
 * The previous implementation wrapped `parseInt` in try/catch, but `parseInt`
 * never throws — it returns NaN for unparsable input — so the catch branch was
 * dead code and NaN leaked through as a "valid" port. Check for NaN explicitly
 * and report the issue; also pass the radix so e.g. "0x1f" is not accepted as hex.
 */
const portSchema = z$1.string().default("31310").transform((value, context) => {
	const port = Number.parseInt(value, 10);
	if (Number.isNaN(port)) {
		context.addIssue({
			code: "custom",
			message: "Invalid port number",
			input: value
		});
		return z$1.NEVER;
	}
	return port;
});
|
|
1001
|
+
const apiStartSchema = z$1.object({ port: portSchema });
|
|
1002
|
+
const exportSchema = z$1.object({
|
|
1003
|
+
outDir: outDirSchema,
|
|
1004
|
+
projects: projectsSchema,
|
|
1005
|
+
template: z$1.enum(["nested", "separate"]).default("nested"),
|
|
1006
|
+
options: exportProjectsOptionsSchema
|
|
1007
|
+
});
|
|
1008
|
+
|
|
1009
|
+
//#endregion
|
|
1010
|
+
//#region src/schema/logSchema.ts
|
|
1011
|
+
const logSourceSchema = z.enum(["core", "desktop"]);
|
|
1012
|
+
const logSchema = z.object({
|
|
1013
|
+
source: logSourceSchema,
|
|
1014
|
+
message: z.string(),
|
|
1015
|
+
meta: z.record(z.string(), z.unknown()).optional()
|
|
1016
|
+
});
|
|
1017
|
+
const logConsoleTransportSchema = logSchema.extend({
|
|
1018
|
+
timestamp: z.string(),
|
|
1019
|
+
level: z.string()
|
|
1020
|
+
});
|
|
1021
|
+
|
|
1022
|
+
//#endregion
|
|
1023
|
+
//#region src/api/routes/content/v1/projects.ts
|
|
1024
|
+
const tags$3 = ["Content API v1"];
|
|
1025
|
+
const router$6 = createRouter().openapi(createRoute({
|
|
1026
|
+
summary: "List Projects",
|
|
1027
|
+
description: "Lists all Projects you currently have access to",
|
|
1028
|
+
method: "get",
|
|
1029
|
+
path: "/",
|
|
1030
|
+
tags: tags$3,
|
|
1031
|
+
request: { query: z.object({
|
|
1032
|
+
limit: z.string().pipe(z.coerce.number()).optional().openapi({
|
|
1033
|
+
default: 15,
|
|
1034
|
+
description: "The maximum number of Projects to return"
|
|
1035
|
+
}),
|
|
1036
|
+
offset: z.string().pipe(z.coerce.number()).optional().openapi({
|
|
1037
|
+
default: 0,
|
|
1038
|
+
description: "The number of Projects to skip before starting to collect the result set"
|
|
1039
|
+
})
|
|
1040
|
+
}) },
|
|
1041
|
+
responses: { [200]: {
|
|
1042
|
+
content: { "application/json": { schema: paginatedListOf(projectSchema) } },
|
|
1043
|
+
description: "A list of Projects you have access to"
|
|
1044
|
+
} }
|
|
1045
|
+
}), async (c) => {
|
|
1046
|
+
const { limit, offset } = c.req.valid("query");
|
|
1047
|
+
const projects = await c.var.projectService.list({
|
|
1048
|
+
limit,
|
|
1049
|
+
offset
|
|
1050
|
+
});
|
|
1051
|
+
return c.json(projects, 200);
|
|
1052
|
+
}).openapi(createRoute({
|
|
1053
|
+
summary: "Count Projects",
|
|
1054
|
+
description: "Counts all Projects you currently have access to",
|
|
1055
|
+
method: "get",
|
|
1056
|
+
path: "/count",
|
|
1057
|
+
tags: tags$3,
|
|
1058
|
+
responses: { [200]: {
|
|
1059
|
+
content: { "application/json": { schema: z.number() } },
|
|
1060
|
+
description: "The number of Projects you have access to"
|
|
1061
|
+
} }
|
|
1062
|
+
}), async (c) => {
|
|
1063
|
+
const count = await c.var.projectService.count();
|
|
1064
|
+
return c.json(count, 200);
|
|
1065
|
+
}).openapi(createRoute({
|
|
1066
|
+
summary: "Get one Project",
|
|
1067
|
+
description: "Retrieve a Project by ID",
|
|
1068
|
+
method: "get",
|
|
1069
|
+
path: "/{projectId}",
|
|
1070
|
+
tags: tags$3,
|
|
1071
|
+
request: { params: z.object({ projectId: uuidSchema.openapi({ param: {
|
|
1072
|
+
name: "projectId",
|
|
1073
|
+
in: "path"
|
|
1074
|
+
} }) }) },
|
|
1075
|
+
responses: { [200]: {
|
|
1076
|
+
content: { "application/json": { schema: projectSchema } },
|
|
1077
|
+
description: "The requested Project"
|
|
1078
|
+
} }
|
|
1079
|
+
}), async (c) => {
|
|
1080
|
+
const { projectId } = c.req.valid("param");
|
|
1081
|
+
const project = await c.var.projectService.read({ id: projectId });
|
|
1082
|
+
return c.json(project, 200);
|
|
1083
|
+
});
|
|
1084
|
+
|
|
1085
|
+
//#endregion
|
|
1086
|
+
//#region src/api/routes/content/v1/collections.ts
|
|
1087
|
+
const tags$2 = ["Content API v1"];
|
|
1088
|
+
const router$5 = createRouter().openapi(createRoute({
|
|
1089
|
+
summary: "List Collections",
|
|
1090
|
+
description: "Lists all Collections of the given Project",
|
|
1091
|
+
method: "get",
|
|
1092
|
+
path: "/{projectId}/collections",
|
|
1093
|
+
tags: tags$2,
|
|
1094
|
+
request: {
|
|
1095
|
+
params: z.object({ projectId: uuidSchema.openapi({ param: {
|
|
1096
|
+
name: "projectId",
|
|
1097
|
+
in: "path"
|
|
1098
|
+
} }) }),
|
|
1099
|
+
query: z.object({
|
|
1100
|
+
limit: z.string().pipe(z.coerce.number()).optional().openapi({
|
|
1101
|
+
default: "15",
|
|
1102
|
+
description: "The maximum number of Collections to return"
|
|
1103
|
+
}),
|
|
1104
|
+
offset: z.string().pipe(z.coerce.number()).optional().openapi({
|
|
1105
|
+
default: "0",
|
|
1106
|
+
description: "The number of Collections to skip before starting to collect the result set"
|
|
1107
|
+
})
|
|
1108
|
+
})
|
|
1109
|
+
},
|
|
1110
|
+
responses: { [200]: {
|
|
1111
|
+
content: { "application/json": { schema: paginatedListOf(collectionSchema) } },
|
|
1112
|
+
description: "A list of Collections for the given Project"
|
|
1113
|
+
} }
|
|
1114
|
+
}), async (c) => {
|
|
1115
|
+
const { projectId } = c.req.valid("param");
|
|
1116
|
+
const { limit, offset } = c.req.valid("query");
|
|
1117
|
+
const collections = await c.var.collectionService.list({
|
|
1118
|
+
projectId,
|
|
1119
|
+
limit,
|
|
1120
|
+
offset
|
|
1121
|
+
});
|
|
1122
|
+
return c.json(collections, 200);
|
|
1123
|
+
}).openapi(createRoute({
|
|
1124
|
+
summary: "Count Collections",
|
|
1125
|
+
description: "Counts all Collections of the given Project",
|
|
1126
|
+
method: "get",
|
|
1127
|
+
path: "/{projectId}/collections/count",
|
|
1128
|
+
tags: tags$2,
|
|
1129
|
+
request: { params: z.object({ projectId: uuidSchema.openapi({ param: {
|
|
1130
|
+
name: "projectId",
|
|
1131
|
+
in: "path"
|
|
1132
|
+
} }) }) },
|
|
1133
|
+
responses: { [200]: {
|
|
1134
|
+
content: { "application/json": { schema: z.number() } },
|
|
1135
|
+
description: "The number of Collections of the given Project"
|
|
1136
|
+
} }
|
|
1137
|
+
}), async (c) => {
|
|
1138
|
+
const { projectId } = c.req.valid("param");
|
|
1139
|
+
const count = await c.var.collectionService.count({ projectId });
|
|
1140
|
+
return c.json(count, 200);
|
|
1141
|
+
}).openapi(createRoute({
|
|
1142
|
+
summary: "Get one Collection",
|
|
1143
|
+
description: "Retrieve a Collection by ID",
|
|
1144
|
+
method: "get",
|
|
1145
|
+
path: "/{projectId}/collections/{collectionId}",
|
|
1146
|
+
tags: tags$2,
|
|
1147
|
+
request: { params: z.object({
|
|
1148
|
+
projectId: uuidSchema.openapi({ param: {
|
|
1149
|
+
name: "projectId",
|
|
1150
|
+
in: "path"
|
|
1151
|
+
} }),
|
|
1152
|
+
collectionId: uuidSchema.openapi({ param: {
|
|
1153
|
+
name: "collectionId",
|
|
1154
|
+
in: "path"
|
|
1155
|
+
} })
|
|
1156
|
+
}) },
|
|
1157
|
+
responses: { [200]: {
|
|
1158
|
+
content: { "application/json": { schema: collectionSchema } },
|
|
1159
|
+
description: "The requested Collection"
|
|
1160
|
+
} }
|
|
1161
|
+
}), async (c) => {
|
|
1162
|
+
const { projectId, collectionId } = c.req.valid("param");
|
|
1163
|
+
const collection = await c.var.collectionService.read({
|
|
1164
|
+
projectId,
|
|
1165
|
+
id: collectionId
|
|
1166
|
+
});
|
|
1167
|
+
return c.json(collection, 200);
|
|
1168
|
+
});
|
|
1169
|
+
|
|
1170
|
+
//#endregion
|
|
1171
|
+
//#region src/api/routes/content/v1/entries.ts
|
|
1172
|
+
const tags$1 = ["Content API v1"];
|
|
1173
|
+
const router$4 = createRouter().openapi(createRoute({
|
|
1174
|
+
summary: "List Entries",
|
|
1175
|
+
description: "Lists all Entries of the given Projects Collection",
|
|
1176
|
+
method: "get",
|
|
1177
|
+
path: "/{projectId}/collections/{collectionId}/entries",
|
|
1178
|
+
tags: tags$1,
|
|
1179
|
+
request: {
|
|
1180
|
+
params: z.object({
|
|
1181
|
+
projectId: uuidSchema.openapi({ param: {
|
|
1182
|
+
name: "projectId",
|
|
1183
|
+
in: "path"
|
|
1184
|
+
} }),
|
|
1185
|
+
collectionId: uuidSchema.openapi({ param: {
|
|
1186
|
+
name: "collectionId",
|
|
1187
|
+
in: "path"
|
|
1188
|
+
} })
|
|
1189
|
+
}),
|
|
1190
|
+
query: z.object({
|
|
1191
|
+
limit: z.string().pipe(z.coerce.number()).optional().openapi({
|
|
1192
|
+
default: "15",
|
|
1193
|
+
description: "The maximum number of Entries to return"
|
|
1194
|
+
}),
|
|
1195
|
+
offset: z.string().pipe(z.coerce.number()).optional().openapi({
|
|
1196
|
+
default: "0",
|
|
1197
|
+
description: "The number of Entries to skip before starting to collect the result set"
|
|
1198
|
+
})
|
|
1199
|
+
})
|
|
1200
|
+
},
|
|
1201
|
+
responses: { [200]: {
|
|
1202
|
+
content: { "application/json": { schema: paginatedListOf(entrySchema) } },
|
|
1203
|
+
description: "A list of Entries for the given Projects Collection"
|
|
1204
|
+
} }
|
|
1205
|
+
}), async (c) => {
|
|
1206
|
+
const { projectId, collectionId } = c.req.valid("param");
|
|
1207
|
+
const { limit, offset } = c.req.valid("query");
|
|
1208
|
+
const entries = await c.var.entryService.list({
|
|
1209
|
+
projectId,
|
|
1210
|
+
collectionId,
|
|
1211
|
+
limit,
|
|
1212
|
+
offset
|
|
1213
|
+
});
|
|
1214
|
+
return c.json(entries, 200);
|
|
1215
|
+
}).openapi(createRoute({
|
|
1216
|
+
summary: "Count Entries",
|
|
1217
|
+
description: "Counts all Entries of the given Projects Collection",
|
|
1218
|
+
method: "get",
|
|
1219
|
+
path: "/{projectId}/collections/{collectionId}/entries/count",
|
|
1220
|
+
tags: tags$1,
|
|
1221
|
+
request: { params: z.object({
|
|
1222
|
+
projectId: uuidSchema.openapi({ param: {
|
|
1223
|
+
name: "projectId",
|
|
1224
|
+
in: "path"
|
|
1225
|
+
} }),
|
|
1226
|
+
collectionId: uuidSchema.openapi({ param: {
|
|
1227
|
+
name: "collectionId",
|
|
1228
|
+
in: "path"
|
|
1229
|
+
} })
|
|
1230
|
+
}) },
|
|
1231
|
+
responses: { [200]: {
|
|
1232
|
+
content: { "application/json": { schema: z.number() } },
|
|
1233
|
+
description: "The number of Entries of the given Projects Collection"
|
|
1234
|
+
} }
|
|
1235
|
+
}), async (c) => {
|
|
1236
|
+
const { projectId, collectionId } = c.req.valid("param");
|
|
1237
|
+
const count = await c.var.entryService.count({
|
|
1238
|
+
projectId,
|
|
1239
|
+
collectionId
|
|
1240
|
+
});
|
|
1241
|
+
return c.json(count, 200);
|
|
1242
|
+
}).openapi(createRoute({
|
|
1243
|
+
summary: "Get one Entry",
|
|
1244
|
+
description: "Retrieve an Entry by ID",
|
|
1245
|
+
method: "get",
|
|
1246
|
+
path: "/{projectId}/collections/{collectionId}/entries/{entryId}",
|
|
1247
|
+
tags: tags$1,
|
|
1248
|
+
request: { params: z.object({
|
|
1249
|
+
projectId: uuidSchema.openapi({ param: {
|
|
1250
|
+
name: "projectId",
|
|
1251
|
+
in: "path"
|
|
1252
|
+
} }),
|
|
1253
|
+
collectionId: uuidSchema.openapi({ param: {
|
|
1254
|
+
name: "collectionId",
|
|
1255
|
+
in: "path"
|
|
1256
|
+
} }),
|
|
1257
|
+
entryId: uuidSchema.openapi({ param: {
|
|
1258
|
+
name: "entryId",
|
|
1259
|
+
in: "path"
|
|
1260
|
+
} })
|
|
1261
|
+
}) },
|
|
1262
|
+
responses: { [200]: {
|
|
1263
|
+
content: { "application/json": { schema: entrySchema } },
|
|
1264
|
+
description: "The requested Entry"
|
|
1265
|
+
} }
|
|
1266
|
+
}), async (c) => {
|
|
1267
|
+
const { projectId, collectionId, entryId } = c.req.valid("param");
|
|
1268
|
+
const entry = await c.var.entryService.read({
|
|
1269
|
+
projectId,
|
|
1270
|
+
collectionId,
|
|
1271
|
+
id: entryId
|
|
1272
|
+
});
|
|
1273
|
+
return c.json(entry, 200);
|
|
1274
|
+
});
|
|
1275
|
+
|
|
1276
|
+
//#endregion
|
|
1277
|
+
//#region src/api/routes/content/v1/assets.ts
|
|
1278
|
+
const tags = ["Content API v1"];
|
|
1279
|
+
const router$3 = createRouter().openapi(createRoute({
|
|
1280
|
+
summary: "List Assets",
|
|
1281
|
+
description: "Lists all Assets of the given Project",
|
|
1282
|
+
method: "get",
|
|
1283
|
+
path: "/{projectId}/assets",
|
|
1284
|
+
tags,
|
|
1285
|
+
request: {
|
|
1286
|
+
params: z.object({ projectId: uuidSchema.openapi({ param: {
|
|
1287
|
+
name: "projectId",
|
|
1288
|
+
in: "path"
|
|
1289
|
+
} }) }),
|
|
1290
|
+
query: z.object({
|
|
1291
|
+
limit: z.string().pipe(z.coerce.number()).optional().openapi({
|
|
1292
|
+
default: "15",
|
|
1293
|
+
description: "The maximum number of Assets to return"
|
|
1294
|
+
}),
|
|
1295
|
+
offset: z.string().pipe(z.coerce.number()).optional().openapi({
|
|
1296
|
+
default: "0",
|
|
1297
|
+
description: "The number of Assets to skip before starting to collect the result set"
|
|
1298
|
+
})
|
|
1299
|
+
})
|
|
1300
|
+
},
|
|
1301
|
+
responses: { [200]: {
|
|
1302
|
+
content: { "application/json": { schema: paginatedListOf(assetSchema) } },
|
|
1303
|
+
description: "A list of Assets for the given Project"
|
|
1304
|
+
} }
|
|
1305
|
+
}), async (c) => {
|
|
1306
|
+
const { projectId } = c.req.valid("param");
|
|
1307
|
+
const { limit, offset } = c.req.valid("query");
|
|
1308
|
+
const assets = await c.var.assetService.list({
|
|
1309
|
+
projectId,
|
|
1310
|
+
limit,
|
|
1311
|
+
offset
|
|
1312
|
+
});
|
|
1313
|
+
return c.json(assets, 200);
|
|
1314
|
+
}).openapi(createRoute({
|
|
1315
|
+
summary: "Count Assets",
|
|
1316
|
+
description: "Counts all Assets of the given Project",
|
|
1317
|
+
method: "get",
|
|
1318
|
+
path: "/{projectId}/assets/count",
|
|
1319
|
+
tags,
|
|
1320
|
+
request: { params: z.object({ projectId: uuidSchema.openapi({ param: {
|
|
1321
|
+
name: "projectId",
|
|
1322
|
+
in: "path"
|
|
1323
|
+
} }) }) },
|
|
1324
|
+
responses: { [200]: {
|
|
1325
|
+
content: { "application/json": { schema: z.number() } },
|
|
1326
|
+
description: "The number of Assets of the given Project"
|
|
1327
|
+
} }
|
|
1328
|
+
}), async (c) => {
|
|
1329
|
+
const { projectId } = c.req.valid("param");
|
|
1330
|
+
const count = await c.var.assetService.count({ projectId });
|
|
1331
|
+
return c.json(count, 200);
|
|
1332
|
+
}).openapi(createRoute({
|
|
1333
|
+
summary: "Get one Asset",
|
|
1334
|
+
description: "Retrieve an Asset by ID",
|
|
1335
|
+
method: "get",
|
|
1336
|
+
path: "/{projectId}/assets/{assetId}",
|
|
1337
|
+
tags,
|
|
1338
|
+
request: { params: z.object({
|
|
1339
|
+
projectId: uuidSchema.openapi({ param: {
|
|
1340
|
+
name: "projectId",
|
|
1341
|
+
in: "path"
|
|
1342
|
+
} }),
|
|
1343
|
+
assetId: uuidSchema.openapi({ param: {
|
|
1344
|
+
name: "assetId",
|
|
1345
|
+
in: "path"
|
|
1346
|
+
} })
|
|
1347
|
+
}) },
|
|
1348
|
+
responses: { [200]: {
|
|
1349
|
+
content: { "application/json": { schema: assetSchema } },
|
|
1350
|
+
description: "The requested Asset"
|
|
1351
|
+
} }
|
|
1352
|
+
}), async (c) => {
|
|
1353
|
+
const { projectId, assetId } = c.req.valid("param");
|
|
1354
|
+
const asset = await c.var.assetService.read({
|
|
1355
|
+
projectId,
|
|
1356
|
+
id: assetId
|
|
1357
|
+
});
|
|
1358
|
+
return c.json(asset, 200);
|
|
1359
|
+
});
|
|
1360
|
+
|
|
1361
|
+
//#endregion
|
|
1362
|
+
//#region src/api/routes/content/v1/index.ts
|
|
1363
|
+
// Content API v1 root: mounts the project, collection, entry and asset
// routers under the shared /projects prefix (order preserved).
const router$2 = [
	router$6,
	router$5,
	router$4,
	router$3
].reduce((app, subRouter) => app.route("/projects", subRouter), createRouter());
|
|
1364
|
+
|
|
1365
|
+
//#endregion
|
|
1366
|
+
//#region src/api/routes/content/index.ts
|
|
1367
|
+
// Content API root: mounts version 1 of the content routes under /v1.
const router$1 = createRouter().route("/v1", router$2);
|
|
1368
|
+
|
|
1369
|
+
//#endregion
|
|
1370
|
+
//#region src/api/routes/index.ts
|
|
1371
|
+
// Top-level API router: mounts the content API under /content.
const router = createRouter().route("/content", router$1);
|
|
1372
|
+
|
|
1373
|
+
//#endregion
|
|
1374
|
+
//#region src/api/index.ts
|
|
1375
|
+
/**
 * Local read-only HTTP API for elek.io content.
 *
 * Wires the injected services into a Hono app, exposes the OpenAPI document
 * at /openapi.json and a Scalar API-reference UI at /, and manages the
 * lifecycle (start/stop/isRunning) of the underlying Node HTTP server.
 */
var LocalApi = class {
	// Injected services, assigned once in the constructor
	logService;
	projectService;
	collectionService;
	entryService;
	assetService;
	// The configured Hono app (routes + OpenAPI doc + reference UI)
	api;
	// The running HTTP server, or null while stopped
	server = null;
	/**
	 * @param logService - Used to log server lifecycle events
	 * @param projectService - Backs the /projects routes
	 * @param collectionService - Backs the collection routes
	 * @param entryService - Backs the entry routes
	 * @param assetService - Backs the asset routes
	 */
	constructor(logService, projectService, collectionService, entryService, assetService) {
		this.logService = logService;
		this.projectService = projectService;
		this.collectionService = collectionService;
		this.entryService = entryService;
		this.assetService = assetService;
		// Build the app: mount all routers at the root and publish the OpenAPI 3.0 document
		this.api = createApi(this.logService, this.projectService, this.collectionService, this.entryService, this.assetService).route("/", router).doc("/openapi.json", {
			openapi: "3.0.0",
			externalDocs: { url: "https://elek.io/docs" },
			info: {
				version: "0.1.0",
				title: "elek.io local API",
				description: "This API allows reading content from local elek.io Projects. You can use this API for development and building static websites and applications locally."
			},
			servers: [{
				url: "http://localhost:{port}",
				description: "elek.io local API",
				variables: { port: {
					default: 31310,
					description: "The port specified in elek.io Clients user configuration"
				} }
			}],
			tags: [{
				name: "Content API v1",
				description: "Version 1 of the elek.io content API lets you read Projects, Collections, Entries and Assets. \n### Resources\n - [Projects](https://elek.io/docs/projects)\n - [Collections](https://elek.io/docs/collections)\n - [Entries](https://elek.io/docs/entries)\n - [Assets](https://elek.io/docs/assets)"
			}]
		});
		// Serve the interactive Scalar API reference UI at the root path
		this.api.get("/", Scalar({
			pageTitle: "elek.io local API",
			url: "/openapi.json",
			theme: "kepler",
			layout: "modern",
			defaultHttpClient: {
				targetKey: "js",
				clientKey: "fetch"
			}
		}));
	}
	/**
	 * Starts the local API on given port.
	 *
	 * Note: `serve` binds asynchronously; the callback only logs once the
	 * server is actually listening.
	 */
	start(port) {
		this.server = serve({
			fetch: this.api.fetch,
			port
		}, (info) => {
			this.logService.info({
				source: "core",
				message: `Started local API on http://localhost:${info.port}`
			});
		});
	}
	/**
	 * Stops the local API.
	 *
	 * No-op when the server was never started (optional chaining on `server`).
	 */
	stop() {
		this.server?.close(() => {
			this.logService.info({
				source: "core",
				message: "Stopped local API"
			});
		});
	}
	/**
	 * Returns true if the local API is running (i.e. the server is listening).
	 */
	isRunning() {
		if (this.server?.listening) return true;
		return false;
	}
};
|
|
1454
|
+
|
|
1455
|
+
//#endregion
|
|
1456
|
+
//#region src/error/GitError.ts
|
|
1457
|
+
/**
 * Error raised when an underlying git operation fails.
 * Callers can discriminate on `error.name === "GitError"`.
 */
var GitError = class extends Error {
	// Identifies the error type without relying on the (anonymous) class name
	name = "GitError";
	/**
	 * @param {string} message - Human-readable description of the git failure
	 */
	constructor(message) {
		super(message);
	}
};
|
|
1463
|
+
|
|
1464
|
+
//#endregion
|
|
1465
|
+
//#region src/error/NoCurrentUserError.ts
|
|
1466
|
+
/**
 * Error raised when an operation requires a current User but none was set.
 * Carries a fixed message; callers discriminate on `error.name`.
 */
var NoCurrentUserError = class extends Error {
	// Identifies the error type without relying on the (anonymous) class name
	name = "NoCurrentUserError";
	constructor() {
		super("Make sure to set a User via Core before using other methods");
	}
};
|
|
1472
|
+
|
|
1473
|
+
//#endregion
|
|
1474
|
+
//#region src/error/ProjectUpgradeError.ts
|
|
1475
|
+
/**
 * Error raised when upgrading a Project to a newer core version fails.
 * Callers can discriminate on `error.name === "ProjectUpgradeError"`.
 */
var ProjectUpgradeError = class extends Error {
	// Identifies the error type without relying on the (anonymous) class name
	name = "ProjectUpgradeError";
	/**
	 * @param {string} message - Human-readable description of the upgrade failure
	 */
	constructor(message) {
		super(message);
	}
};
|
|
1481
|
+
|
|
1482
|
+
//#endregion
|
|
1483
|
+
//#region src/error/RequiredParameterMissingError.ts
|
|
1484
|
+
/**
 * Error raised when a required parameter was not provided.
 * The message embeds the missing parameter's name.
 */
var RequiredParameterMissingError = class extends Error {
	// Identifies the error type without relying on the (anonymous) class name
	name = "RequiredParameterMissingError";
	/**
	 * @param {string} parameter - Name of the parameter that was missing
	 */
	constructor(parameter) {
		super(`Missing required parameter "${parameter}"`);
	}
};
|
|
1490
|
+
|
|
1491
|
+
//#endregion
|
|
1492
|
+
//#region src/util/node.ts
|
|
1493
|
+
// Bundler-generated namespace object re-exporting the node utilities below
// (lazy getters so each binding resolves at access time).
var node_exports = /* @__PURE__ */ __exportAll({
	execCommand: () => execCommand,
	files: () => files,
	folders: () => folders,
	isNotAnError: () => isNotAnError,
	isNotEmpty: () => isNotEmpty,
	pathTo: () => pathTo,
	workingDirectory: () => workingDirectory
});
|
|
1502
|
+
/**
|
|
1503
|
+
* The directory in which everything is stored and will be worked in
|
|
1504
|
+
*
|
|
1505
|
+
* @todo make the workingDirectory an elek option to be set via app.getPath('home') (electron instead of node)?
|
|
1506
|
+
*/
|
|
1507
|
+
const workingDirectory = Path.join(Os.homedir(), "elek.io");
|
|
1508
|
+
/**
|
|
1509
|
+
* A collection of often used paths
|
|
1510
|
+
*/
|
|
1511
|
+
const pathTo = {
|
|
1512
|
+
tmp: Path.join(workingDirectory, "tmp"),
|
|
1513
|
+
userFile: Path.join(workingDirectory, "user.json"),
|
|
1514
|
+
logs: Path.join(workingDirectory, "logs"),
|
|
1515
|
+
projects: Path.join(workingDirectory, "projects"),
|
|
1516
|
+
project: (projectId) => {
|
|
1517
|
+
return Path.join(pathTo.projects, projectId);
|
|
1518
|
+
},
|
|
1519
|
+
projectFile: (projectId) => {
|
|
1520
|
+
return Path.join(pathTo.project(projectId), "project.json");
|
|
1521
|
+
},
|
|
1522
|
+
lfs: (projectId) => {
|
|
1523
|
+
return Path.join(pathTo.project(projectId), projectFolderSchema.enum.lfs);
|
|
1524
|
+
},
|
|
1525
|
+
collections: (projectId) => {
|
|
1526
|
+
return Path.join(pathTo.project(projectId), projectFolderSchema.enum.collections);
|
|
1527
|
+
},
|
|
1528
|
+
collection: (projectId, id) => {
|
|
1529
|
+
return Path.join(pathTo.collections(projectId), id);
|
|
1530
|
+
},
|
|
1531
|
+
collectionFile: (projectId, id) => {
|
|
1532
|
+
return Path.join(pathTo.collection(projectId, id), "collection.json");
|
|
1533
|
+
},
|
|
1534
|
+
entries: (projectId, collectionId) => {
|
|
1535
|
+
return Path.join(pathTo.collection(projectId, collectionId));
|
|
1536
|
+
},
|
|
1537
|
+
entryFile: (projectId, collectionId, id) => {
|
|
1538
|
+
return Path.join(pathTo.entries(projectId, collectionId), `${id}.json`);
|
|
1539
|
+
},
|
|
1540
|
+
sharedValues: (projectId) => {
|
|
1541
|
+
return Path.join(pathTo.project(projectId), "shared-values");
|
|
1542
|
+
},
|
|
1543
|
+
sharedValueFile: (projectId, id, language) => {
|
|
1544
|
+
return Path.join(pathTo.sharedValues(projectId), `${id}.${language}.json`);
|
|
1545
|
+
},
|
|
1546
|
+
assets: (projectId) => {
|
|
1547
|
+
return Path.join(pathTo.project(projectId), projectFolderSchema.enum.assets);
|
|
1548
|
+
},
|
|
1549
|
+
assetFile: (projectId, id) => {
|
|
1550
|
+
return Path.join(pathTo.assets(projectId), `${id}.json`);
|
|
1551
|
+
},
|
|
1552
|
+
asset: (projectId, id, extension) => {
|
|
1553
|
+
return Path.join(pathTo.lfs(projectId), `${id}.${extension}`);
|
|
1554
|
+
},
|
|
1555
|
+
tmpAsset: (id, commitHash, extension) => {
|
|
1556
|
+
return Path.join(pathTo.tmp, `${id}.${commitHash}.${extension}`);
|
|
1557
|
+
}
|
|
1558
|
+
};
|
|
1559
|
+
/**
 * Used as parameter for filter() methods to assure,
 * only values not null, undefined or empty strings are returned
 *
 * Note: other falsy values (0, false, NaN) count as non-empty.
 *
 * @param value Value to check
 * @returns {boolean} true unless value is nullish or a blank string
 */
function isNotEmpty(value) {
	const isNullish = value === null || value === undefined;
	const isBlankString = typeof value === "string" && value.trim() === "";
	return !isNullish && !isBlankString;
}
|
|
1572
|
+
/**
 * Used as parameter for filter() methods to assure,
 * only items that are not of type Error are returned
 *
 * @param item Item to check
 * @returns {boolean} false for Error (and subclass) instances, true otherwise
 */
function isNotAnError(item) {
	return !(item instanceof Error);
}
|
|
1581
|
+
/**
 * Returns all folders of given path to a directory
 *
 * @param path Directory to read
 * @returns {Promise<import('node:fs').Dirent[]>} Dirents that are directories
 */
async function folders(path) {
	const dirents = await Fs.readdir(path, { withFileTypes: true });
	return dirents.filter((dirent) => dirent.isDirectory());
}
|
|
1589
|
+
/**
 * Returns all files of given path to a directory,
 * which can be filtered by extension
 *
 * @param path Directory to read
 * @param extension Optional suffix; when given, only file names ending with it are returned
 * @returns {Promise<import('node:fs').Dirent[]>} Dirents that are regular files
 */
async function files(path, extension) {
	const dirents = await Fs.readdir(path, { withFileTypes: true });
	return dirents.filter((dirent) => {
		if (!dirent.isFile()) return false;
		// No extension filter given -> keep every regular file
		return extension ? dirent.name.endsWith(extension) : true;
	});
}
|
|
1602
|
+
/**
 * Executes a shell command async and returns the output.
 *
 * When on Windows, it will automatically append `.cmd` to the command if it is in the `commandsToSuffix` list.
 *
 * @param {{ command: string, args: string[], options?: object, logger: { info: Function, error: Function } }} props
 * @returns {Promise<{ stdout: string, stderr: string }>} Captured output on success
 * @throws Rejects with the execFile error on failure (after logging it)
 */
function execCommand({ command, args, options, logger }) {
	return new Promise((resolve, reject) => {
		const commandsToSuffix = ["pnpm"];
		// Windows needs e.g. "pnpm.cmd" instead of "pnpm" when run through a shell
		let suffixedCommand = command;
		if (Os.platform() === "win32") {
			suffixedCommand = command
				.split(" ")
				.map((part) => (commandsToSuffix.includes(part) ? `${part}.cmd` : part))
				.join(" ");
		}
		const fullCommand = `${suffixedCommand} ${args.join(" ")}`;
		const execOptions = {
			...options,
			shell: true
		};
		const startedAt = Date.now();
		execFile(suffixedCommand, args, execOptions, (error, stdout, stderr) => {
			const durationMs = Date.now() - startedAt;
			if (error) {
				logger.error({
					source: "core",
					message: `Error executing command "${fullCommand}" after ${durationMs}ms: ${error.message}`,
					meta: {
						error,
						stdout: stdout.toString(),
						stderr: stderr.toString()
					}
				});
				reject(error instanceof Error ? error : new Error(error.message));
				return;
			}
			logger.info({
				source: "core",
				message: `Command "${fullCommand}" executed successfully in ${durationMs}ms.`,
				meta: {
					stdout: stdout.toString(),
					stderr: stderr.toString()
				}
			});
			resolve({
				stdout: stdout.toString(),
				stderr: stderr.toString()
			});
		});
	});
}
|
|
1647
|
+
|
|
1648
|
+
//#endregion
|
|
1649
|
+
//#region src/service/AbstractCrudService.ts
|
|
1650
|
+
/**
 * A base service that provides properties for most other services
 */
var AbstractCrudService = class {
	type;
	options;
	logService;
	/**
	 * Do not instantiate directly as this is an abstract class
	 *
	 * @param type Service type identifier for the concrete subclass
	 * @param options Core options shared by all services
	 * @param logService Logger used for non-fatal warnings
	 */
	constructor(type, options, logService) {
		this.type = type;
		this.options = options;
		this.logService = logService;
	}
	/**
	 * Basically a Promise.all() without rejecting if one promise fails to resolve
	 *
	 * Failed promises are logged as warnings and removed from the result.
	 *
	 * @throws If a falsy entry is found in the given array
	 */
	async returnResolved(promises) {
		const settling = [];
		for (const [index, promise] of promises.entries()) {
			if (!promise) throw new Error(`No promise found at index "${index}"`);
			const guarded = promise
				.then((result) => {
					return result;
				})
				.catch((error) => {
					// Normalize non-Error rejections so the filter below can detect them
					const actualError = error instanceof Error ? error : new Error(String(error));
					this.logService.warn({
						source: "core",
						message: `Function "returnResolved" catched an error while resolving a promise: ${actualError.message}`,
						meta: {
							error: actualError,
							promise
						}
					});
					return actualError;
				});
			settling.push(guarded);
		}
		const settled = await Promise.all(settling);
		return settled.filter(isNotAnError);
	}
	/**
	 * Returns a list of all file references of given project and type
	 *
	 * @param type File type of the references wanted
	 * @param projectId Project to get all asset references from
	 * @param collectionId Only needed when requesting files of type "Entry"
	 * @throws RequiredParameterMissingError when a needed ID is absent; Error for unsupported types
	 */
	async listReferences(type, projectId, collectionId) {
		const requireProjectId = () => {
			if (!projectId) throw new RequiredParameterMissingError("projectId");
		};
		if (type === objectTypeSchema.enum.project) {
			return this.getFolderReferences(pathTo.projects);
		}
		if (type === objectTypeSchema.enum.asset) {
			requireProjectId();
			return this.getFileReferences(pathTo.lfs(projectId));
		}
		if (type === objectTypeSchema.enum.collection) {
			requireProjectId();
			return this.getFolderReferences(pathTo.collections(projectId));
		}
		if (type === objectTypeSchema.enum.entry) {
			requireProjectId();
			if (!collectionId) throw new RequiredParameterMissingError("collectionId");
			return this.getFileReferences(pathTo.collection(projectId, collectionId));
		}
		if (type === objectTypeSchema.enum.sharedValue) {
			requireProjectId();
			return this.getFileReferences(pathTo.sharedValues(projectId));
		}
		throw new Error(`Trying to list files of unsupported type "${type}"`);
	}
	/**
	 * Lists all sub-folders of given path as FileReferences,
	 * silently skipping (with a warning) folders whose names do not parse.
	 */
	async getFolderReferences(path) {
		const directories = await folders(path);
		const references = directories.map((entry) => {
			try {
				return fileReferenceSchema.parse({ id: entry.name });
			} catch {
				this.logService.warn({
					source: "core",
					message: `Function "getFolderReferences" is ignoring folder "${entry.name}" in "${path}" as it does not match the expected format`
				});
				return null;
			}
		});
		return references.filter(isNotEmpty);
	}
	/**
	 * Searches for all files inside given folder,
	 * parses their names and returns them as FileReference
	 *
	 * Ignores files if the extension is not supported.
	 */
	async getFileReferences(path) {
		const dirents = await files(path);
		const references = dirents.map((entry) => {
			// File names look like "<id>.<extension>"; anything else fails schema parsing below
			const [id, extension] = entry.name.split(".");
			try {
				return fileReferenceSchema.parse({ id, extension });
			} catch {
				this.logService.warn({
					source: "core",
					message: `Function "getFileReferences" is ignoring file "${entry.name}" in "${path}" as it does not match the expected format`
				});
				return null;
			}
		});
		return references.filter(isNotEmpty);
	}
};
|
|
1755
|
+
|
|
1756
|
+
//#endregion
|
|
1757
|
+
//#region src/util/shared.ts
|
|
1758
|
+
/**
 * Returns a new UUID
 *
 * Thin wrapper around the `uuid` package's `v4` generator.
 *
 * @returns {string} A random version 4 UUID
 */
function uuid() {
	return v4();
}
|
|
1764
|
+
/**
 * Returns a string representing date and time
 * in a simplified format based on ISO 8601.
 * The timezone is always UTC.
 *
 * - If value is not given, the current date and time is used
 * - If value is given, it's converted to above representation and UTC timezone
 *
 * Note: any falsy value (including 0 and "") yields the current time.
 *
 * @example 'YYYY-MM-DDTHH:mm:ss.sssZ'
 *
 * @see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString
 * @see https://en.wikipedia.org/wiki/ISO_8601
 */
function datetime(value) {
	const date = value ? new Date(value) : new Date();
	return date.toISOString();
}
|
|
1781
|
+
/**
 * Returns the slug of given string
 *
 * @param {string} string - Text to convert into a lowercase, dash-separated slug
 * @returns {string} The slugified text
 */
function slug(string) {
	const slugifyOptions = {
		separator: "-",
		lowercase: true,
		decamelize: true
	};
	return slugify(string, slugifyOptions);
}
|
|
1791
|
+
|
|
1792
|
+
//#endregion
|
|
1793
|
+
//#region src/service/AssetService.ts
|
|
1794
|
+
/**
 * Service that manages CRUD functionality for Asset files on disk
 *
 * An Asset consists of two files: the binary payload (in the Project's lfs
 * folder) and a JSON metadata file. Every mutating method stages the touched
 * paths and creates a git commit via the injected gitService.
 */
var AssetService = class extends AbstractCrudService {
	jsonFileService;
	gitService;
	constructor(options, logService, jsonFileService, gitService) {
		super(serviceTypeSchema.enum.Asset, options, logService);
		this.jsonFileService = jsonFileService;
		this.gitService = gitService;
	}
	/**
	 * Creates a new Asset
	 *
	 * Copies the source file into the Project's lfs folder, writes the metadata
	 * JSON, then commits both. If copying or writing fails, the partially
	 * created files are removed via delete() before rethrowing.
	 */
	async create(props) {
		createAssetSchema.parse(props);
		const id = uuid();
		const projectPath = pathTo.project(props.projectId);
		const fileType = this.getFileType(props.filePath);
		const size = await this.getFileSize(props.filePath);
		const assetPath = pathTo.asset(props.projectId, id, fileType.extension);
		const assetFilePath = pathTo.assetFile(props.projectId, id);
		const assetFile = {
			...props,
			name: slug(props.name),
			objectType: "asset",
			id,
			created: datetime(),
			updated: null,
			extension: fileType.extension,
			mimeType: fileType.mimeType,
			size
		};
		try {
			await Fs.copyFile(props.filePath, assetPath);
			await this.jsonFileService.create(assetFile, assetFilePath, assetFileSchema);
		} catch (error) {
			// Roll back: remove whatever was created before failing
			await this.delete({
				...assetFile,
				projectId: props.projectId
			});
			throw error;
		}
		await this.gitService.add(projectPath, [assetFilePath, assetPath]);
		await this.gitService.commit(projectPath, {
			method: "create",
			reference: {
				objectType: "asset",
				id
			}
		});
		return this.toAsset(props.projectId, assetFile);
	}
	/**
	 * Returns an Asset by ID
	 *
	 * If a commit hash is provided, the Asset is read from history: the metadata
	 * is parsed from the committed JSON and the binary payload is materialized
	 * into the tmp folder so it can be referenced via absolutePath.
	 */
	async read(props) {
		readAssetSchema.parse(props);
		if (!props.commitHash) {
			const assetFile = await this.jsonFileService.read(pathTo.assetFile(props.projectId, props.id), assetFileSchema);
			return this.toAsset(props.projectId, assetFile);
		} else {
			const assetFile = this.migrate(JSON.parse(await this.gitService.getFileContentAtCommit(pathTo.project(props.projectId), pathTo.assetFile(props.projectId, props.id), props.commitHash)));
			const assetBlob = await this.gitService.getFileContentAtCommit(pathTo.project(props.projectId), pathTo.asset(props.projectId, props.id, assetFile.extension), props.commitHash, "binary");
			await Fs.writeFile(pathTo.tmpAsset(assetFile.id, props.commitHash, assetFile.extension), assetBlob, "binary");
			return this.toAsset(props.projectId, assetFile, props.commitHash);
		}
	}
	/**
	 * Copies an Asset to given file path on disk
	 */
	async save(props) {
		saveAssetSchema.parse(props);
		const asset = await this.read(props);
		await Fs.copyFile(asset.absolutePath, props.filePath);
	}
	/**
	 * Updates given Asset
	 *
	 * Use the optional "newFilePath" prop to update the Asset itself
	 *
	 * NOTE(review): read() returns an Asset including `absolutePath` and
	 * `history`, which are spread into the payload handed to
	 * jsonFileService.update — presumably assetFileSchema strips or rejects
	 * those extra keys; verify.
	 */
	async update(props) {
		updateAssetSchema.parse(props);
		const projectPath = pathTo.project(props.projectId);
		const assetFilePath = pathTo.assetFile(props.projectId, props.id);
		const prevAssetFile = await this.read(props);
		const assetFile = {
			...prevAssetFile,
			...props,
			name: slug(props.name),
			updated: datetime()
		};
		if (props.newFilePath) {
			// Payload replacement: remove the old binary, copy the new one and
			// stage both so git records the rename/replace
			const fileType = this.getFileType(props.newFilePath);
			const size = await this.getFileSize(props.newFilePath);
			const prevAssetPath = pathTo.asset(props.projectId, props.id, prevAssetFile.extension);
			const assetPath = pathTo.asset(props.projectId, props.id, fileType.extension);
			await Fs.remove(prevAssetPath);
			await Fs.copyFile(props.newFilePath, assetPath);
			await this.gitService.add(projectPath, [prevAssetPath, assetPath]);
			assetFile.extension = fileType.extension;
			assetFile.mimeType = fileType.mimeType;
			assetFile.size = size;
		}
		await this.jsonFileService.update(assetFile, assetFilePath, assetFileSchema);
		await this.gitService.add(projectPath, [assetFilePath]);
		await this.gitService.commit(projectPath, {
			method: "update",
			reference: {
				objectType: "asset",
				id: assetFile.id
			}
		});
		return this.toAsset(props.projectId, assetFile);
	}
	/**
	 * Deletes given Asset
	 *
	 * Removes both the binary payload and the metadata file, then commits.
	 */
	async delete(props) {
		deleteAssetSchema.parse(props);
		const projectPath = pathTo.project(props.projectId);
		const assetFilePath = pathTo.assetFile(props.projectId, props.id);
		const assetPath = pathTo.asset(props.projectId, props.id, props.extension);
		await Fs.remove(assetPath);
		await Fs.remove(assetFilePath);
		await this.gitService.add(projectPath, [assetFilePath, assetPath]);
		await this.gitService.commit(projectPath, {
			method: "delete",
			reference: {
				objectType: "asset",
				id: props.id
			}
		});
	}
	/**
	 * Returns a paginated list of Assets
	 *
	 * limit defaults to 15; a limit of 0 returns everything from offset on.
	 * Assets that fail to read are logged and dropped (see returnResolved),
	 * so `list.length` can be smaller than `limit`.
	 */
	async list(props) {
		listAssetsSchema.parse(props);
		const offset = props.offset || 0;
		const limit = props.limit ?? 15;
		const assetReferences = await this.listReferences(objectTypeSchema.enum.asset, props.projectId);
		const partialAssetReferences = limit === 0 ? assetReferences.slice(offset) : assetReferences.slice(offset, offset + limit);
		const assets = await this.returnResolved(partialAssetReferences.map((assetReference) => {
			return this.read({
				projectId: props.projectId,
				id: assetReference.id
			});
		}));
		return {
			total: assetReferences.length,
			limit,
			offset,
			list: assets
		};
	}
	/**
	 * Returns the total number of Assets in given Project
	 */
	async count(props) {
		countAssetsSchema.parse(props);
		return (await this.listReferences(objectTypeSchema.enum.asset, props.projectId)).length;
	}
	/**
	 * Checks if given object is of type Asset
	 */
	isAsset(obj) {
		return assetSchema.safeParse(obj).success;
	}
	/**
	 * Returns the size of an file in bytes
	 *
	 * @param path Path of the file to get the size from
	 */
	async getFileSize(path) {
		return (await Fs.stat(path)).size;
	}
	/**
	 * Creates an Asset from given AssetFile
	 *
	 * Resolves the absolute path of the binary payload (the tmp copy when a
	 * commitHash is given) and attaches the file's git history.
	 *
	 * @param projectId The project's ID
	 * @param assetFile The AssetFile to convert
	 * @param commitHash Optional; points absolutePath at the tmp copy for that commit
	 */
	async toAsset(projectId, assetFile, commitHash) {
		const assetPath = commitHash ? pathTo.tmpAsset(assetFile.id, commitHash, assetFile.extension) : pathTo.asset(projectId, assetFile.id, assetFile.extension);
		const history = await this.gitService.log(pathTo.project(projectId), { filePath: pathTo.assetFile(projectId, assetFile.id) });
		return {
			...assetFile,
			absolutePath: assetPath,
			history
		};
	}
	/**
	 * Returns the found and supported extension as well as mime type,
	 * otherwise throws an error
	 *
	 * Detection is based solely on the file name (via the `mime` package),
	 * not on file content.
	 *
	 * @param filePath Path to the file to check
	 * @throws When no MIME type or no extension can be derived
	 */
	getFileType(filePath) {
		const mimeType = mime.getType(filePath);
		if (mimeType === null) throw new Error(`Unsupported MIME type of file "${filePath}"`);
		const extension = mime.getExtension(mimeType);
		if (extension === null) throw new Error(`Unsupported extension for MIME type "${mimeType}" of file "${filePath}"`);
		return {
			extension,
			mimeType
		};
	}
	/**
	 * Migrates an potentially outdated Asset file to the current schema
	 *
	 * Currently a plain schema parse — throws if the file does not match.
	 */
	migrate(potentiallyOutdatedAssetFile) {
		return assetFileSchema.parse(potentiallyOutdatedAssetFile);
	}
};
|
|
2005
|
+
|
|
2006
|
+
//#endregion
|
|
2007
|
+
//#region src/service/CollectionService.ts
|
|
2008
|
+
/**
 * Service that manages CRUD functionality for Collection files on disk
 *
 * A Collection is a folder inside the Project's collections directory holding
 * a `collection.json` metadata file plus its Entry files. Every mutating
 * method stages the touched paths and creates a git commit.
 */
var CollectionService = class extends AbstractCrudService {
	jsonFileService;
	gitService;
	constructor(options, logService, jsonFileService, gitService) {
		super(serviceTypeSchema.enum.Collection, options, logService);
		this.jsonFileService = jsonFileService;
		this.gitService = gitService;
	}
	/**
	 * Creates a new Collection
	 *
	 * Slugifies both singular and plural slug values, creates the Collection's
	 * folder and metadata file, then commits.
	 */
	async create(props) {
		createCollectionSchema.parse(props);
		const id = uuid();
		const projectPath = pathTo.project(props.projectId);
		const collectionPath = pathTo.collection(props.projectId, id);
		const collectionFilePath = pathTo.collectionFile(props.projectId, id);
		const collectionFile = {
			...props,
			objectType: "collection",
			id,
			slug: {
				singular: slug(props.slug.singular),
				plural: slug(props.slug.plural)
			},
			created: datetime(),
			updated: null
		};
		await Fs.ensureDir(collectionPath);
		await this.jsonFileService.create(collectionFile, collectionFilePath, collectionFileSchema);
		await this.gitService.add(projectPath, [collectionFilePath]);
		await this.gitService.commit(projectPath, {
			method: "create",
			reference: {
				objectType: "collection",
				id
			}
		});
		return this.toCollection(props.projectId, collectionFile);
	}
	/**
	 * Returns a Collection by ID
	 *
	 * If a commit hash is provided, the Collection is read from history
	 * (the committed JSON is parsed and run through migrate()).
	 */
	async read(props) {
		readCollectionSchema.parse(props);
		if (!props.commitHash) {
			const collectionFile = await this.jsonFileService.read(pathTo.collectionFile(props.projectId, props.id), collectionFileSchema);
			return this.toCollection(props.projectId, collectionFile);
		} else {
			const collectionFile = this.migrate(JSON.parse(await this.gitService.getFileContentAtCommit(pathTo.project(props.projectId), pathTo.collectionFile(props.projectId, props.id), props.commitHash)));
			return this.toCollection(props.projectId, collectionFile);
		}
	}
	/**
	 * Updates given Collection
	 *
	 * @todo finish implementing checks for FieldDefinitions and extract methods
	 *
	 * NOTE(review): read() returns a Collection including `history`, which is
	 * spread into the payload handed to jsonFileService.update — presumably
	 * collectionFileSchema strips or rejects that extra key; verify.
	 *
	 * @param projectId Project ID of the collection to update
	 * @param collection Collection to write to disk
	 * @returns An object containing information about the actions needed to be taken,
	 * before given update can be executed or void if the update was executed successfully
	 */
	async update(props) {
		updateCollectionSchema.parse(props);
		const projectPath = pathTo.project(props.projectId);
		const collectionFilePath = pathTo.collectionFile(props.projectId, props.id);
		const collectionFile = {
			...await this.read(props),
			...props,
			updated: datetime()
		};
		await this.jsonFileService.update(collectionFile, collectionFilePath, collectionFileSchema);
		await this.gitService.add(projectPath, [collectionFilePath]);
		await this.gitService.commit(projectPath, {
			method: "update",
			reference: {
				objectType: "collection",
				id: collectionFile.id
			}
		});
		return this.toCollection(props.projectId, collectionFile);
	}
	/**
	 * Deletes given Collection (folder), including it's items
	 *
	 * The Fields that Collection used are not deleted.
	 */
	async delete(props) {
		deleteCollectionSchema.parse(props);
		const projectPath = pathTo.project(props.projectId);
		const collectionPath = pathTo.collection(props.projectId, props.id);
		await Fs.remove(collectionPath);
		await this.gitService.add(projectPath, [collectionPath]);
		await this.gitService.commit(projectPath, {
			method: "delete",
			reference: {
				objectType: "collection",
				id: props.id
			}
		});
	}
	/**
	 * Returns a paginated list of Collections
	 *
	 * limit defaults to 15; a limit of 0 returns everything from offset on.
	 * Collections that fail to read are logged and dropped (see returnResolved).
	 */
	async list(props) {
		listCollectionsSchema.parse(props);
		const offset = props.offset || 0;
		const limit = props.limit ?? 15;
		const collectionReferences = await this.listReferences(objectTypeSchema.enum.collection, props.projectId);
		const partialCollectionReferences = limit === 0 ? collectionReferences.slice(offset) : collectionReferences.slice(offset, offset + limit);
		const collections = await this.returnResolved(partialCollectionReferences.map((reference) => {
			return this.read({
				projectId: props.projectId,
				id: reference.id
			});
		}));
		return {
			total: collectionReferences.length,
			limit,
			offset,
			list: collections
		};
	}
	/**
	 * Returns the total number of Collections in given Project
	 */
	async count(props) {
		countCollectionsSchema.parse(props);
		return (await this.listReferences(objectTypeSchema.enum.collection, props.projectId)).length;
	}
	/**
	 * Checks if given object is of type Collection
	 */
	isCollection(obj) {
		return collectionFileSchema.safeParse(obj).success;
	}
	/**
	 * Migrates an potentially outdated Collection file to the current schema
	 *
	 * Currently a plain schema parse — throws if the file does not match.
	 */
	migrate(potentiallyOutdatedCollectionFile) {
		return collectionFileSchema.parse(potentiallyOutdatedCollectionFile);
	}
	/**
	 * Creates an Collection from given CollectionFile
	 *
	 * Attaches the metadata file's git history to the returned object.
	 *
	 * @param projectId The project's ID
	 * @param collectionFile The CollectionFile to convert
	 */
	async toCollection(projectId, collectionFile) {
		const history = await this.gitService.log(pathTo.project(projectId), { filePath: pathTo.collectionFile(projectId, collectionFile.id) });
		return {
			...collectionFile,
			history
		};
	}
};
|
|
2164
|
+
|
|
2165
|
+
//#endregion
|
|
2166
|
+
//#region src/service/EntryService.ts
|
|
2167
|
+
/**
|
|
2168
|
+
* Service that manages CRUD functionality for Entry files on disk
|
|
2169
|
+
*/
|
|
2170
|
+
var EntryService = class extends AbstractCrudService {
|
|
2171
|
+
jsonFileService;
|
|
2172
|
+
gitService;
|
|
2173
|
+
collectionService;
|
|
2174
|
+
constructor(options, logService, jsonFileService, gitService, collectionService) {
|
|
2175
|
+
super(serviceTypeSchema.enum.Entry, options, logService);
|
|
2176
|
+
this.jsonFileService = jsonFileService;
|
|
2177
|
+
this.gitService = gitService;
|
|
2178
|
+
this.collectionService = collectionService;
|
|
2179
|
+
}
|
|
2180
|
+
/**
|
|
2181
|
+
* Creates a new Entry for given Collection
|
|
2182
|
+
*/
|
|
2183
|
+
async create(props) {
|
|
2184
|
+
createEntrySchema.parse(props);
|
|
2185
|
+
const id = uuid();
|
|
2186
|
+
const projectPath = pathTo.project(props.projectId);
|
|
2187
|
+
const entryFilePath = pathTo.entryFile(props.projectId, props.collectionId, id);
|
|
2188
|
+
const collection = await this.collectionService.read({
|
|
2189
|
+
projectId: props.projectId,
|
|
2190
|
+
id: props.collectionId
|
|
2191
|
+
});
|
|
2192
|
+
const entryFile = {
|
|
2193
|
+
objectType: "entry",
|
|
2194
|
+
id,
|
|
2195
|
+
values: props.values,
|
|
2196
|
+
created: datetime(),
|
|
2197
|
+
updated: null
|
|
2198
|
+
};
|
|
2199
|
+
const entry = await this.toEntry(props.projectId, props.collectionId, entryFile);
|
|
2200
|
+
getCreateEntrySchemaFromFieldDefinitions(collection.fieldDefinitions).parse(props);
|
|
2201
|
+
await this.jsonFileService.create(entryFile, entryFilePath, entryFileSchema);
|
|
2202
|
+
await this.gitService.add(projectPath, [entryFilePath]);
|
|
2203
|
+
await this.gitService.commit(projectPath, {
|
|
2204
|
+
method: "create",
|
|
2205
|
+
reference: {
|
|
2206
|
+
objectType: "entry",
|
|
2207
|
+
id: entryFile.id,
|
|
2208
|
+
collectionId: props.collectionId
|
|
2209
|
+
}
|
|
2210
|
+
});
|
|
2211
|
+
return entry;
|
|
2212
|
+
}
|
|
2213
|
+
/**
|
|
2214
|
+
* Returns an Entry from given Collection by ID
|
|
2215
|
+
*
|
|
2216
|
+
* If a commit hash is provided, the Entry is read from history
|
|
2217
|
+
*/
|
|
2218
|
+
async read(props) {
|
|
2219
|
+
readEntrySchema.parse(props);
|
|
2220
|
+
if (!props.commitHash) {
|
|
2221
|
+
const entryFile = await this.jsonFileService.read(pathTo.entryFile(props.projectId, props.collectionId, props.id), entryFileSchema);
|
|
2222
|
+
return this.toEntry(props.projectId, props.collectionId, entryFile);
|
|
2223
|
+
} else {
|
|
2224
|
+
const entryFile = this.migrate(JSON.parse(await this.gitService.getFileContentAtCommit(pathTo.project(props.projectId), pathTo.entryFile(props.projectId, props.collectionId, props.id), props.commitHash)));
|
|
2225
|
+
return this.toEntry(props.projectId, props.collectionId, entryFile);
|
|
2226
|
+
}
|
|
2227
|
+
}
|
|
2228
|
+
/**
|
|
2229
|
+
* Updates an Entry of given Collection with new Values and shared Values
|
|
2230
|
+
*/
|
|
2231
|
+
async update(props) {
|
|
2232
|
+
updateEntrySchema.parse(props);
|
|
2233
|
+
const projectPath = pathTo.project(props.projectId);
|
|
2234
|
+
const entryFilePath = pathTo.entryFile(props.projectId, props.collectionId, props.id);
|
|
2235
|
+
const collection = await this.collectionService.read({
|
|
2236
|
+
projectId: props.projectId,
|
|
2237
|
+
id: props.collectionId
|
|
2238
|
+
});
|
|
2239
|
+
const entryFile = {
|
|
2240
|
+
...await this.read({
|
|
2241
|
+
projectId: props.projectId,
|
|
2242
|
+
collectionId: props.collectionId,
|
|
2243
|
+
id: props.id
|
|
2244
|
+
}),
|
|
2245
|
+
values: props.values,
|
|
2246
|
+
updated: datetime()
|
|
2247
|
+
};
|
|
2248
|
+
const entry = await this.toEntry(props.projectId, props.collectionId, entryFile);
|
|
2249
|
+
getUpdateEntrySchemaFromFieldDefinitions(collection.fieldDefinitions).parse(props);
|
|
2250
|
+
await this.jsonFileService.update(entryFile, entryFilePath, entryFileSchema);
|
|
2251
|
+
await this.gitService.add(projectPath, [entryFilePath]);
|
|
2252
|
+
await this.gitService.commit(projectPath, {
|
|
2253
|
+
method: "update",
|
|
2254
|
+
reference: {
|
|
2255
|
+
objectType: "entry",
|
|
2256
|
+
id: entryFile.id,
|
|
2257
|
+
collectionId: props.collectionId
|
|
2258
|
+
}
|
|
2259
|
+
});
|
|
2260
|
+
return entry;
|
|
2261
|
+
}
|
|
2262
|
+
/**
|
|
2263
|
+
* Deletes given Entry from it's Collection
|
|
2264
|
+
*/
|
|
2265
|
+
async delete(props) {
|
|
2266
|
+
deleteEntrySchema.parse(props);
|
|
2267
|
+
const projectPath = pathTo.project(props.projectId);
|
|
2268
|
+
const entryFilePath = pathTo.entryFile(props.projectId, props.collectionId, props.id);
|
|
2269
|
+
await Fs.remove(entryFilePath);
|
|
2270
|
+
await this.gitService.add(projectPath, [entryFilePath]);
|
|
2271
|
+
await this.gitService.commit(projectPath, {
|
|
2272
|
+
method: "delete",
|
|
2273
|
+
reference: {
|
|
2274
|
+
objectType: "entry",
|
|
2275
|
+
id: props.id,
|
|
2276
|
+
collectionId: props.collectionId
|
|
2277
|
+
}
|
|
2278
|
+
});
|
|
2279
|
+
}
|
|
2280
|
+
async list(props) {
|
|
2281
|
+
listEntriesSchema.parse(props);
|
|
2282
|
+
const offset = props.offset || 0;
|
|
2283
|
+
const limit = props.limit ?? 15;
|
|
2284
|
+
const entryReferences = await this.listReferences(objectTypeSchema.enum.entry, props.projectId, props.collectionId);
|
|
2285
|
+
const partialEntryReferences = limit === 0 ? entryReferences.slice(offset) : entryReferences.slice(offset, offset + limit);
|
|
2286
|
+
const entries = await this.returnResolved(partialEntryReferences.map((reference) => {
|
|
2287
|
+
return this.read({
|
|
2288
|
+
projectId: props.projectId,
|
|
2289
|
+
collectionId: props.collectionId,
|
|
2290
|
+
id: reference.id
|
|
2291
|
+
});
|
|
2292
|
+
}));
|
|
2293
|
+
return {
|
|
2294
|
+
total: entryReferences.length,
|
|
2295
|
+
limit,
|
|
2296
|
+
offset,
|
|
2297
|
+
list: entries
|
|
2298
|
+
};
|
|
2299
|
+
}
|
|
2300
|
+
async count(props) {
|
|
2301
|
+
countEntriesSchema.parse(props);
|
|
2302
|
+
return (await this.listReferences(objectTypeSchema.enum.entry, props.projectId, props.collectionId)).length;
|
|
2303
|
+
}
|
|
2304
|
+
/**
|
|
2305
|
+
* Checks if given object is of type Entry
|
|
2306
|
+
*/
|
|
2307
|
+
isEntry(obj) {
|
|
2308
|
+
return entrySchema.safeParse(obj).success;
|
|
2309
|
+
}
|
|
2310
|
+
/**
|
|
2311
|
+
* Migrates an potentially outdated Entry file to the current schema
|
|
2312
|
+
*/
|
|
2313
|
+
migrate(potentiallyOutdatedEntryFile) {
|
|
2314
|
+
return entryFileSchema.parse(potentiallyOutdatedEntryFile);
|
|
2315
|
+
}
|
|
2316
|
+
/**
|
|
2317
|
+
* Creates an Entry from given EntryFile by resolving it's Values
|
|
2318
|
+
*/
|
|
2319
|
+
async toEntry(projectId, collectionId, entryFile) {
|
|
2320
|
+
const history = await this.gitService.log(pathTo.project(projectId), { filePath: pathTo.entryFile(projectId, collectionId, entryFile.id) });
|
|
2321
|
+
return {
|
|
2322
|
+
...entryFile,
|
|
2323
|
+
history
|
|
2324
|
+
};
|
|
2325
|
+
}
|
|
2326
|
+
};
|
|
2327
|
+
|
|
2328
|
+
//#endregion
|
|
2329
|
+
//#region src/service/GitTagService.ts
|
|
2330
|
+
/**
 * Service that manages CRUD functionality for GitTags
 *
 * Tags are annotated git tags whose name is a UUID and whose message
 * carries the user-provided text.
 */
var GitTagService = class extends AbstractCrudService {
	// Bound git executor injected by GitService, so all tag operations share
	// the same sequential git queue
	git;
	constructor(options, git, logService) {
		super(serviceTypeSchema.enum.GitTag, options, logService);
		this.git = git;
	}
	/**
	 * Creates a new tag
	 *
	 * @see https://git-scm.com/docs/git-tag#Documentation/git-tag.txt---annotate
	 *
	 * @param props.path Path to the repository
	 * @param props.hash Optional commit hash to tag (defaults to HEAD)
	 * @param props.message Message stored in the annotated tag
	 * @returns The created tag, read back from the repository
	 */
	async create(props) {
		createGitTagSchema.parse(props);
		const id = uuid();
		let args = [
			"tag",
			"--annotate",
			id
		];
		if (props.hash) args = [...args, props.hash];
		args = [
			...args,
			"-m",
			props.message
		];
		await this.git(props.path, args);
		return await this.read({
			path: props.path,
			id
		});
	}
	/**
	 * Returns a tag by ID
	 *
	 * Internally uses list() but only returns the tag with matching ID.
	 *
	 * @throws GitError when no tag with the given UUID exists
	 */
	async read(props) {
		readGitTagSchema.parse(props);
		const tag = (await this.list({ path: props.path })).list.find((tag) => {
			return tag.id === props.id;
		});
		if (!tag) throw new GitError(`Provided tag with UUID "${props.id}" did not match any known tags`);
		return tag;
	}
	/**
	 * Updating a git tag is not supported.
	 * Please delete the old and create a new one
	 *
	 * @deprecated
	 * @see https://git-scm.com/docs/git-tag#_on_re_tagging
	 */
	update() {
		throw new Error("Updating a git tag is not supported. Please delete the old and create a new one");
	}
	/**
	 * Deletes a tag
	 *
	 * @see https://git-scm.com/docs/git-tag#Documentation/git-tag.txt---delete
	 *
	 * @param path Path to the repository
	 * @param id UUID of the tag to delete
	 */
	async delete(props) {
		deleteGitTagSchema.parse(props);
		const args = [
			"tag",
			"--delete",
			props.id
		];
		await this.git(props.path, args);
	}
	/**
	 * Gets all local tags or filter them by pattern
	 *
	 * They are sorted by authordate of the commit, not when the tag is created.
	 * This ensures tags are sorted correctly in the timeline of their commits.
	 *
	 * @see https://git-scm.com/docs/git-tag#Documentation/git-tag.txt---list
	 */
	async list(props) {
		listGitTagsSchema.parse(props);
		let args = ["tag", "--list"];
		args = [
			...args,
			"--sort=-*authordate",
			"--format=%(refname:short)|%(subject)|%(*authorname)|%(*authoremail)|%(*authordate:iso-strict)"
		];
		const gitTags = (await this.git(props.path, args)).stdout.split("\n").filter((line) => {
			return line.trim() !== "";
		}).map((line) => {
			const lineArray = line.split("|");
			// %(*authoremail) renders as "<email>"; slice(1, -1) strips both
			// surrounding angle brackets at once. (A previous second
			// slice(0, -1) wrongly truncated the last character of the email.)
			if (lineArray[3]?.startsWith("<") && lineArray[3]?.endsWith(">")) {
				lineArray[3] = lineArray[3].slice(1, -1);
			}
			return {
				id: lineArray[0],
				message: lineArray[1],
				author: {
					name: lineArray[2],
					email: lineArray[3]
				},
				datetime: datetime(lineArray[4])
			};
		}).filter(this.isGitTag.bind(this));
		return {
			total: gitTags.length,
			limit: 0,
			offset: 0,
			list: gitTags
		};
	}
	/**
	 * Returns the total number of tags inside given repository
	 *
	 * Internally uses list(), so do not use count()
	 * in conjunction with it to avoid multiple git calls.
	 *
	 * @param path Path to the repository
	 */
	async count(props) {
		countGitTagsSchema.parse(props);
		return (await this.list({ path: props.path })).total;
	}
	/**
	 * Type guard for GitTag
	 *
	 * @param obj The object to check
	 */
	isGitTag(obj) {
		return gitTagSchema.safeParse(obj).success;
	}
};
|
|
2466
|
+
|
|
2467
|
+
//#endregion
|
|
2468
|
+
//#region src/service/GitService.ts
|
|
2469
|
+
/**
|
|
2470
|
+
* Service that manages Git functionality
|
|
2471
|
+
*
|
|
2472
|
+
* Uses dugite Node.js bindings for Git to be fully compatible
|
|
2473
|
+
* and be able to leverage Git LFS functionality
|
|
2474
|
+
* @see https://github.com/desktop/dugite
|
|
2475
|
+
*
|
|
2476
|
+
* Heavily inspired by the GitHub Desktop app
|
|
2477
|
+
* @see https://github.com/desktop/desktop
|
|
2478
|
+
*
|
|
2479
|
+
* Git operations are sequential!
|
|
2480
|
+
* We use a FIFO queue to translate async calls
|
|
2481
|
+
* into a sequence of git operations
|
|
2482
|
+
*
|
|
2483
|
+
* @todo All public methods should receive only a single object as parameter and the type should be defined through the shared library to be accessible in Core and Client
|
|
2484
|
+
*/
|
|
2485
|
+
var GitService = class {
	// Git version string (e.g. "2.43.0"); null until updateVersion() resolves
	version;
	// Path reported by `git --exec-path`; null until updateGitPath() resolves
	gitPath;
	// FIFO queue (concurrency 1) that serializes every git invocation
	queue;
	logService;
	gitTagService;
	userService;
	constructor(options, logService, userService) {
		this.version = null;
		this.gitPath = null;
		this.queue = new PQueue({ concurrency: 1 });
		// The tag service reuses this instance's git() so tag operations go
		// through the same sequential queue
		this.gitTagService = new GitTagService(options, this.git.bind(this), logService);
		this.logService = logService;
		this.userService = userService;
		// NOTE(review): both calls are async but not awaited (constructors
		// cannot await); version and gitPath remain null until they resolve —
		// confirm callers tolerate that window
		this.updateVersion();
		this.updateGitPath();
	}
	/**
	 * CRUD methods to work with git tags
	 */
	get tags() {
		return this.gitTagService;
	}
	/**
	 * Create an empty Git repository or reinitialize an existing one
	 *
	 * @see https://git-scm.com/docs/git-init
	 *
	 * @param path Path to initialize in. Fails if path does not exist
	 * @param options Options specific to the init operation
	 */
	async init(path, options) {
		let args = ["init"];
		if (options?.initialBranch) args = [...args, `--initial-branch=${options.initialBranch}`];
		await this.git(path, args);
		// Apply the local user/pull/push configuration to the fresh repository
		await this.setLocalConfig(path);
	}
	/**
	 * Clone a repository into a directory
	 *
	 * @see https://git-scm.com/docs/git-clone
	 *
	 * @todo Implement progress callback / events
	 *
	 * @param url The remote repository URL to clone from
	 * @param path The destination path for the cloned repository.
	 * Which is only working if the directory is existing and empty.
	 * @param options Options specific to the clone operation
	 */
	async clone(url, path, options) {
		let args = ["clone", "--progress"];
		if (options?.bare) args = [...args, "--bare"];
		if (options?.branch) args = [
			...args,
			"--branch",
			options.branch
		];
		if (options?.depth) args = [
			...args,
			"--depth",
			options.depth.toString()
		];
		if (options?.singleBranch === true) args = [...args, "--single-branch"];
		// clone is executed with an empty working directory since the target
		// repository does not exist yet; git receives the destination as argument
		await this.git("", [
			...args,
			url,
			path
		]);
		await this.setLocalConfig(path);
	}
	/**
	 * Add file contents to the index
	 *
	 * @see https://git-scm.com/docs/git-add
	 *
	 * @param path Path to the repository
	 * @param files Files to add (absolute paths; the repository prefix is
	 * stripped so git receives repo-relative paths)
	 */
	async add(path, files) {
		const args = [
			"add",
			"--",
			...files.map((filePath) => {
				// Strips the first occurrence of "<repo path><separator>" to turn
				// an absolute file path into a repo-relative one
				return filePath.replace(`${path}${Path.sep}`, "");
			})
		];
		await this.git(path, args);
	}
	/**
	 * Lists changed files via `git status --porcelain=2`
	 *
	 * NOTE(review): the file path is taken from the 9th space-separated token
	 * of each porcelain line; paths containing spaces would be truncated —
	 * presumably paths here are UUID-based and space-free, verify against callers
	 *
	 * @param path Path to the repository
	 */
	async status(path) {
		return (await this.git(path, ["status", "--porcelain=2"])).stdout.split("\n").filter((line) => {
			return line.trim() !== "";
		}).map((line) => {
			return { filePath: line.trim().split(" ")[8] };
		});
	}
	// Branch-related operations, grouped as an object property so callers can
	// use e.g. `gitService.branches.current(path)`
	branches = {
		// Lists all branches, split into local and remote (the "remotes/"
		// prefix is stripped from remote branch names)
		list: async (path) => {
			const normalizedLinesArr = (await this.git(path, [
				"branch",
				"--list",
				"--all"
			])).stdout.split("\n").filter((line) => {
				return line.trim() !== "";
			}).map((line) => {
				// "* " marks the currently checked-out branch; remove it
				return line.trim().replace("* ", "");
			});
			const local = [];
			const remote = [];
			normalizedLinesArr.forEach((line) => {
				if (line.startsWith("remotes/")) remote.push(line.replace("remotes/", ""));
				else local.push(line);
			});
			return {
				local,
				remote
			};
		},
		// Returns the name of the currently checked-out branch
		current: async (path) => {
			return (await this.git(path, ["branch", "--show-current"])).stdout.trim();
		},
		// Switches to given branch; with `isNew` the branch is created first.
		// The branch name is validated via check-ref-format before use
		switch: async (path, branch, options) => {
			await this.checkBranchOrTagName(path, branch);
			let args = ["switch"];
			if (options?.isNew === true) args = [
				...args,
				"--create",
				branch
			];
			else args = [...args, branch];
			await this.git(path, args);
		},
		// Deletes given branch; `force` deletes even if not fully merged
		delete: async (path, branch, force) => {
			let args = ["branch", "--delete"];
			if (force === true) args = [...args, "--force"];
			await this.git(path, [...args, branch]);
		}
	};
	// Remote-related operations; this codebase only works with the "origin" remote
	remotes = {
		// Lists the names of all configured remotes
		list: async (path) => {
			return (await this.git(path, ["remote"])).stdout.split("\n").filter((line) => {
				return line.trim() !== "";
			});
		},
		// True when a remote named "origin" is configured
		hasOrigin: async (path) => {
			if ((await this.remotes.list(path)).includes("origin")) return true;
			return false;
		},
		// Adds a remote named "origin" pointing at given URL
		addOrigin: async (path, url) => {
			const args = [
				"remote",
				"add",
				"origin",
				url.trim()
			];
			await this.git(path, args);
		},
		// Returns the URL of "origin", or null when none is set
		getOriginUrl: async (path) => {
			const result = (await this.git(path, [
				"remote",
				"get-url",
				"origin"
			])).stdout.trim();
			return result.length === 0 ? null : result;
		},
		// Replaces the URL of the existing "origin" remote
		setOriginUrl: async (path, url) => {
			const args = [
				"remote",
				"set-url",
				"origin",
				url.trim()
			];
			await this.git(path, args);
		}
	};
	/**
	 * Join two development histories together
	 *
	 * @see https://git-scm.com/docs/git-merge
	 */
	async merge(path, branch, options) {
		let args = ["merge"];
		if (options?.squash === true) args = [...args, "--squash"];
		args = [...args, branch];
		await this.git(path, args);
	}
	/**
	 * Reset current HEAD to the specified state
	 *
	 * @todo maybe add more options
	 * @see https://git-scm.com/docs/git-reset
	 *
	 * @param path Path to the repository
	 * @param mode Modifies the working tree depending on given mode
	 * @param commit Resets the current branch head to this commit / tag
	 */
	async reset(path, mode, commit) {
		const args = [
			"reset",
			`--${mode}`,
			commit
		];
		await this.git(path, args);
	}
	/**
	 * Restore working tree files
	 *
	 * @see https://git-scm.com/docs/git-restore/
	 *
	 * @todo It's probably a good idea to not use restore
	 * for a use case where someone just wants to have a look
	 * and maybe copy something from a deleted file.
	 * We should use `checkout` without `add .` and `commit` for that
	 *
	 * NOTE(review): there is no restore() implementation following this
	 * comment — it appears to be a leftover from a removed method
	 *
	 * @param path Path to the repository
	 * @param source Git commit SHA or tag name to restore to
	 * @param files Files to restore
	 */
	/**
	 * Download objects and refs from remote `origin`
	 *
	 * @see https://www.git-scm.com/docs/git-fetch
	 *
	 * @param path Path to the repository
	 */
	async fetch(path) {
		await this.git(path, ["fetch"]);
	}
	/**
	 * Fetch from and integrate (rebase or merge) with a local branch
	 *
	 * @see https://git-scm.com/docs/git-pull
	 *
	 * @param path Path to the repository
	 */
	async pull(path) {
		await this.git(path, ["pull"]);
	}
	/**
	 * Update remote refs along with associated objects to remote `origin`
	 *
	 * @see https://git-scm.com/docs/git-push
	 *
	 * @param path Path to the repository
	 */
	async push(path, options) {
		let args = ["push", "origin"];
		if (options?.all === true) args = [...args, "--all"];
		if (options?.force === true) args = [...args, "--force"];
		await this.git(path, args);
	}
	/**
	 * Record changes to the repository
	 *
	 * The message object is JSON-serialized into the commit subject;
	 * log() parses it back with JSON.parse.
	 *
	 * @see https://git-scm.com/docs/git-commit
	 *
	 * @param path Path to the repository
	 * @param message An object describing the changes
	 * @throws NoCurrentUserError when no current user is configured
	 */
	async commit(path, message) {
		gitMessageSchema.parse(message);
		const user = await this.userService.get();
		if (!user) throw new NoCurrentUserError();
		const args = [
			"commit",
			`--message=${JSON.stringify(message)}`,
			`--author=${user.name} <${user.email}>`
		];
		await this.git(path, args);
	}
	/**
	 * Gets local commit history
	 *
	 * Output format per line is "%H|%s|%an|%ae|%aI|%D":
	 * hash | JSON message | author name | author email | ISO date | ref names.
	 * Commits whose subject is not valid JSON or that otherwise fail the
	 * GitCommit schema are filtered out by isGitCommit().
	 *
	 * @see https://git-scm.com/docs/git-log
	 *
	 * @todo Check if there is a need to trim the git commit message of chars
	 * @todo Use this method in a service. Decide if we need a HistoryService for example
	 *
	 * @param path Path to the repository
	 * @param options Options specific to the log operation
	 */
	async log(path, options) {
		let args = ["log"];
		if (options?.between?.from) args = [...args, `${options.between.from}..${options.between.to || "HEAD"}`];
		if (options?.limit) args = [...args, `--max-count=${options.limit}`];
		args = [...args, "--format=%H|%s|%an|%ae|%aI|%D"];
		if (options?.filePath) args = [
			...args,
			"--",
			options.filePath
		];
		const noEmptyLinesArr = (await this.git(path, args)).stdout.split("\n").filter((line) => {
			return line.trim() !== "";
		});
		return (await Promise.all(noEmptyLinesArr.map(async (line) => {
			const lineArray = line.split("|");
			// %D may contain "tag: <uuid>"; resolve it to the full GitTag if present
			const tagId = this.refNameToTagName(lineArray[5] || "");
			const tag = tagId ? await this.tags.read({
				path,
				id: tagId
			}) : null;
			return {
				hash: lineArray[0],
				message: JSON.parse(lineArray[1] || ""),
				author: {
					name: lineArray[2],
					email: lineArray[3]
				},
				datetime: datetime(lineArray[4]),
				tag
			};
		}))).filter(this.isGitCommit.bind(this));
	}
	/**
	 * Retrieves the content of a file at a specific commit
	 *
	 * The file path is converted to a repo-relative, forward-slash path
	 * since `git show` does not accept Windows-style backslash separators.
	 *
	 * @see https://git-scm.com/docs/git-show
	 */
	async getFileContentAtCommit(path, filePath, commitHash, encoding = "utf8") {
		const args = ["show", `${commitHash}:${filePath.replace(`${path}${Path.sep}`, "").split("\\").join("/")}`];
		const setEncoding = (cb) => {
			if (cb.stdout) cb.stdout.setEncoding(encoding);
		};
		return (await this.git(path, args, { processCallback: setEncoding })).stdout;
	}
	// Extracts a tag name from a "%D" ref decoration like "tag: <uuid>".
	// Returns null unless the remaining string is a valid UUID (this codebase
	// only creates UUID-named tags)
	refNameToTagName(refName) {
		const tagName = refName.replace("tag: ", "").trim();
		if (tagName === "" || uuidSchema.safeParse(tagName).success === false) return null;
		return tagName;
	}
	/**
	 * Reads the currently used version of Git
	 *
	 * This can help debugging
	 */
	async updateVersion() {
		this.version = (await this.git("", ["--version"])).stdout.replace("git version", "").trim();
	}
	/**
	 * Reads the path to the executable of Git that is used
	 *
	 * This can help debugging, since dugite is shipping their own executable
	 * but in some cases resolves another executable
	 * @see https://github.com/desktop/dugite/blob/main/lib/git-environment.ts
	 */
	async updateGitPath() {
		this.gitPath = (await this.git("", ["--exec-path"])).stdout.trim();
	}
	/**
	 * A reference is used in Git to specify branches and tags.
	 * This method checks if given name matches the required format
	 *
	 * Throws (via the non-zero git exit code) when the name is invalid.
	 *
	 * @see https://git-scm.com/docs/git-check-ref-format
	 *
	 * @param path Path to the repository
	 * @param name Name to check
	 */
	async checkBranchOrTagName(path, name) {
		await this.git(path, [
			"check-ref-format",
			"--allow-onelevel",
			name
		]);
	}
	/**
	 * Sets the git config of given local repository from ElekIoCoreOptions
	 *
	 * Configures user.name, user.email, push.autoSetupRemote and pull.rebase
	 * locally for the repository.
	 *
	 * @param path Path to the repository
	 * @throws NoCurrentUserError when no current user is configured
	 */
	async setLocalConfig(path) {
		const user = await this.userService.get();
		if (!user) throw new NoCurrentUserError();
		const userNameArgs = [
			"config",
			"--local",
			"user.name",
			user.name
		];
		const userEmailArgs = [
			"config",
			"--local",
			"user.email",
			user.email
		];
		const autoSetupRemoteArgs = [
			"config",
			"--local",
			"push.autoSetupRemote",
			"true"
		];
		const pullRebaseArgs = [
			"config",
			"--local",
			"pull.rebase",
			"true"
		];
		await this.git(path, userNameArgs);
		await this.git(path, userEmailArgs);
		await this.git(path, autoSetupRemoteArgs);
		await this.git(path, pullRebaseArgs);
	}
	/**
	 * Type guard for GitCommit
	 *
	 * @param obj The object to check
	 */
	isGitCommit(obj) {
		return gitCommitSchema.safeParse(obj).success;
	}
	/**
	 * Wraps the execution of any git command
	 * to use a FIFO queue for sequential processing
	 *
	 * Logs every invocation with its duration (as a warning when it took
	 * 100ms or more) and throws GitError on a non-zero exit code.
	 *
	 * @param path Path to the repository
	 * @param args Arguments to append after the `git` command
	 */
	async git(path, args, options) {
		const result = await this.queue.add(async () => {
			const start = Date.now();
			return {
				gitResult: await exec(args, path, options),
				durationMs: Date.now() - start
			};
		});
		if (!result) throw new GitError(`Git ${this.version} (${this.gitPath}) command "git ${args.join(" ")}" executed for "${path}" failed to return a result`);
		const gitLog = {
			source: "core",
			message: `Executed "git ${args.join(" ")}" in ${result.durationMs}ms`,
			meta: { command: `git ${args.join(" ")}` }
		};
		// Slow git calls (>= 100ms) are surfaced as warnings to aid debugging
		if (result.durationMs >= 100) this.logService.warn(gitLog);
		else this.logService.debug(gitLog);
		if (result.gitResult.exitCode !== 0) throw new GitError(`Git ${this.version} (${this.gitPath}) command "git ${args.join(" ")}" executed for "${path}" failed with exit code "${result.gitResult.exitCode}" and message "${result.gitResult.stderr.toString().trim() || result.gitResult.stdout.toString().trim()}"`);
		return {
			...result.gitResult,
			stdout: result.gitResult.stdout.toString(),
			stderr: result.gitResult.stderr.toString()
		};
	}
};
|
|
2924
|
+
|
|
2925
|
+
//#endregion
|
|
2926
|
+
//#region src/service/JsonFileService.ts
|
|
2927
|
+
/**
|
|
2928
|
+
* Service that manages CRUD functionality for JSON files on disk
|
|
2929
|
+
*/
|
|
2930
|
+
var JsonFileService = class extends AbstractCrudService {
	// In-memory cache of parsed file contents, keyed by file path;
	// only consulted when options.file.cache is enabled
	cache = /* @__PURE__ */ new Map();
	constructor(options, logService) {
		super(serviceTypeSchema.enum.JsonFile, options, logService);
	}
	/**
	 * Creates a new file on disk. Fails if path already exists
	 *
	 * @param data Data to write into the file
	 * @param path Path to write the file to
	 * @param schema Schema of the file to validate against
	 * @returns Validated content of the file from disk
	 */
	async create(data, path, schema) {
		const validated = schema.parse(data);
		// "wx" flag: write, but fail if the file already exists
		await Fs.writeFile(path, this.serialize(validated), {
			flag: "wx",
			encoding: "utf8"
		});
		if (this.options.file.cache === true) {
			this.cache.set(path, validated);
		}
		this.logService.debug({
			source: "core",
			message: `Created file "${path}"`
		});
		return validated;
	}
	/**
	 * Reads the content of a file on disk. Fails if path does not exist
	 *
	 * @param path Path to read the file from
	 * @param schema Schema of the file to validate against
	 * @returns Validated content of the file from disk
	 */
	async read(path, schema) {
		const useCache = this.options.file.cache === true;
		if (useCache && this.cache.has(path)) {
			this.logService.debug({
				source: "core",
				message: `Cache hit reading file "${path}"`
			});
			return schema.parse(this.cache.get(path));
		}
		this.logService.debug({
			source: "core",
			message: `Cache miss reading file "${path}"`
		});
		const raw = await Fs.readFile(path, {
			flag: "r",
			encoding: "utf8"
		});
		const validated = schema.parse(this.deserialize(raw));
		if (useCache) {
			this.cache.set(path, validated);
		}
		return validated;
	}
	/**
	 * Reads the content of a file on disk. Fails if path does not exist.
	 * Does not validate the content of the file against a schema and
	 * therefore is only to be used when retrieving data we do not have
	 * a current schema for. E.g. reading from history or while upgrading
	 * the old schema of a file to a new, current schema.
	 *
	 * Does not read from or write to cache.
	 *
	 * @param path Path to read the file from
	 * @returns Unvalidated content of the file from disk
	 */
	async unsafeRead(path) {
		this.logService.warn({
			source: "core",
			message: `Unsafe reading of file "${path}"`
		});
		const raw = await Fs.readFile(path, {
			flag: "r",
			encoding: "utf8"
		});
		return this.deserialize(raw);
	}
	/**
	 * Overwrites an existing file on disk
	 *
	 * @todo Check how to error out if the file does not exist already
	 *
	 * @param data Data to write into the file
	 * @param path Path to the file to overwrite
	 * @param schema Schema of the file to validate against
	 * @returns Validated content of the file from disk
	 */
	async update(data, path, schema) {
		const validated = schema.parse(data);
		// "w" flag: write, creating or truncating as needed
		await Fs.writeFile(path, this.serialize(validated), {
			flag: "w",
			encoding: "utf8"
		});
		if (this.options.file.cache === true) {
			this.cache.set(path, validated);
		}
		this.logService.debug({
			source: "core",
			message: `Updated file "${path}"`
		});
		return validated;
	}
	// Pretty-prints with 2-space indentation for readable files on disk
	serialize(data) {
		return JSON.stringify(data, null, 2);
	}
	deserialize(data) {
		return JSON.parse(data);
	}
};
|
|
3040
|
+
|
|
3041
|
+
//#endregion
|
|
3042
|
+
//#region src/service/LogService.ts
|
|
3043
|
+
/**
|
|
3044
|
+
* Service that handles logging to file and console
|
|
3045
|
+
*/
|
|
3046
|
+
var LogService = class {
	// Underlying winston logger, wired with a daily-rotating file transport
	// and a colorized console transport
	logger;
	constructor(options) {
		// File transport: one JSON log file per day, gzipped on rotation,
		// kept for 30 days
		const rotatingFileTransport = new DailyRotateFile({
			dirname: pathTo.logs,
			filename: "%DATE%.log",
			datePattern: "YYYY-MM-DD",
			zippedArchive: true,
			maxFiles: "30d",
			handleExceptions: true,
			handleRejections: true,
			format: format.combine(format.timestamp(), format.json())
		});
		rotatingFileTransport.on("rotate", (oldFilename, newFilename) => {
			this.info({
				message: `Rotated log file from ${oldFilename} to ${newFilename}`,
				source: "core"
			});
		});
		rotatingFileTransport.on("error", (error) => {
			this.error({
				message: `Error rotating log file: ${error.message}`,
				source: "core",
				meta: { error }
			});
		});
		const consoleTransport = new transports.Console({
			handleExceptions: true,
			handleRejections: true,
			format: format.combine(format.colorize(), format.timestamp({ format: "HH:mm:ss" }), format.printf((props) => {
				// winston stores extra log-call arguments (e.g. { source, meta })
				// under the Symbol.for("splat") key of the info object
				const splatArgs = props[Symbol.for("splat")];
				const result = logConsoleTransportSchema.safeParse({
					...splatArgs?.[0] ?? {},
					timestamp: props["timestamp"],
					level: props.level,
					message: props.message
				});
				if (result.success) {
					const { timestamp, level, source, message } = result.data;
					return `${timestamp} [${source}] ${level}: ${message}`;
				}
				// Fallback formatting for log records (e.g. from winston itself)
				// that do not match the expected console transport schema
				return `${String(props["timestamp"])} ${props.level}: ${String(props.message)}`;
			}))
		});
		this.logger = createLogger({
			level: options.log.level,
			transports: [rotatingFileTransport, consoleTransport]
		});
	}
	// Logs at "debug" level; props are validated against logSchema first
	debug(props) {
		const { source, message, meta } = logSchema.parse(props);
		this.logger.debug(message, {
			source,
			meta
		});
	}
	// Logs at "info" level; props are validated against logSchema first
	info(props) {
		const { source, message, meta } = logSchema.parse(props);
		this.logger.info(message, {
			source,
			meta
		});
	}
	// Logs at "warn" level; props are validated against logSchema first
	warn(props) {
		const { source, message, meta } = logSchema.parse(props);
		this.logger.warn(message, {
			source,
			meta
		});
	}
	// Logs at "error" level; props are validated against logSchema first
	error(props) {
		const { source, message, meta } = logSchema.parse(props);
		this.logger.error(message, {
			source,
			meta
		});
	}
};
|
|
3124
|
+
|
|
3125
|
+
//#endregion
|
|
3126
|
+
//#region src/error/RemoteOriginMissingError.ts
|
|
3127
|
+
var RemoteOriginMissingError = class extends Error {
|
|
3128
|
+
constructor(projectId) {
|
|
3129
|
+
super(`Tried to delete Project "${projectId}" but it does not have a remote origin. Deleting a Project without a remote origin could lead to data loss. Use the "force" option to delete it anyway.`);
|
|
3130
|
+
this.name = "RemoteOriginMissingError";
|
|
3131
|
+
}
|
|
3132
|
+
};
|
|
3133
|
+
|
|
3134
|
+
//#endregion
|
|
3135
|
+
//#region src/error/SynchronizeLocalChangesError.ts
|
|
3136
|
+
var SynchronizeLocalChangesError = class extends Error {
|
|
3137
|
+
constructor(projectId) {
|
|
3138
|
+
super(`Tried to delete Project "${projectId}" but it has local changes that are not yet pushed to the remote origin. Deleting a Project with local changes could lead to data loss. Use the "force" option to delete it anyway.`);
|
|
3139
|
+
this.name = "SynchronizeLocalChangesError";
|
|
3140
|
+
}
|
|
3141
|
+
};
|
|
3142
|
+
|
|
3143
|
+
//#endregion
|
|
3144
|
+
//#region src/service/ProjectService.ts
|
|
3145
|
+
/**
|
|
3146
|
+
* Service that manages CRUD functionality for Project files on disk
|
|
3147
|
+
*/
|
|
3148
|
+
var ProjectService = class extends AbstractCrudService {
|
|
3149
|
+
coreVersion;
|
|
3150
|
+
jsonFileService;
|
|
3151
|
+
gitService;
|
|
3152
|
+
assetService;
|
|
3153
|
+
collectionService;
|
|
3154
|
+
entryService;
|
|
3155
|
+
constructor(coreVersion, options, logService, jsonFileService, gitService, assetService, collectionService, entryService) {
|
|
3156
|
+
super(serviceTypeSchema.enum.Project, options, logService);
|
|
3157
|
+
this.coreVersion = coreVersion;
|
|
3158
|
+
this.jsonFileService = jsonFileService;
|
|
3159
|
+
this.gitService = gitService;
|
|
3160
|
+
this.assetService = assetService;
|
|
3161
|
+
this.collectionService = collectionService;
|
|
3162
|
+
this.entryService = entryService;
|
|
3163
|
+
}
|
|
3164
|
+
/**
|
|
3165
|
+
* Creates a new Project
|
|
3166
|
+
*/
|
|
3167
|
+
async create(props) {
|
|
3168
|
+
createProjectSchema.parse(props);
|
|
3169
|
+
const id = uuid();
|
|
3170
|
+
const projectFile = {
|
|
3171
|
+
...props,
|
|
3172
|
+
objectType: "project",
|
|
3173
|
+
id,
|
|
3174
|
+
created: datetime(),
|
|
3175
|
+
updated: null,
|
|
3176
|
+
coreVersion: this.coreVersion,
|
|
3177
|
+
status: "todo",
|
|
3178
|
+
version: "0.0.1"
|
|
3179
|
+
};
|
|
3180
|
+
const projectPath = pathTo.project(id);
|
|
3181
|
+
await Fs.ensureDir(projectPath);
|
|
3182
|
+
try {
|
|
3183
|
+
await this.createFolderStructure(projectPath);
|
|
3184
|
+
await this.createGitignore(projectPath);
|
|
3185
|
+
await this.gitService.init(projectPath, { initialBranch: projectBranchSchema.enum.production });
|
|
3186
|
+
await this.jsonFileService.create(projectFile, pathTo.projectFile(id), projectFileSchema);
|
|
3187
|
+
await this.gitService.add(projectPath, ["."]);
|
|
3188
|
+
await this.gitService.commit(projectPath, {
|
|
3189
|
+
method: "create",
|
|
3190
|
+
reference: {
|
|
3191
|
+
objectType: "project",
|
|
3192
|
+
id
|
|
3193
|
+
}
|
|
3194
|
+
});
|
|
3195
|
+
await this.gitService.branches.switch(projectPath, projectBranchSchema.enum.work, { isNew: true });
|
|
3196
|
+
} catch (error) {
|
|
3197
|
+
await this.delete({
|
|
3198
|
+
id,
|
|
3199
|
+
force: true
|
|
3200
|
+
});
|
|
3201
|
+
throw error;
|
|
3202
|
+
}
|
|
3203
|
+
return await this.toProject(projectFile);
|
|
3204
|
+
}
|
|
3205
|
+
/**
|
|
3206
|
+
* Clones a Project by URL
|
|
3207
|
+
*/
|
|
3208
|
+
async clone(props) {
|
|
3209
|
+
cloneProjectSchema.parse(props);
|
|
3210
|
+
const tmpId = uuid();
|
|
3211
|
+
const tmpProjectPath = Path.join(pathTo.tmp, tmpId);
|
|
3212
|
+
await this.gitService.clone(props.url, tmpProjectPath);
|
|
3213
|
+
const projectFile = await this.jsonFileService.read(Path.join(tmpProjectPath, "project.json"), projectFileSchema);
|
|
3214
|
+
const projectPath = pathTo.project(projectFile.id);
|
|
3215
|
+
if (await Fs.pathExists(projectPath)) throw new Error(`Tried to clone Project "${projectFile.id}" from "${props.url}" - but the Project already exists locally`);
|
|
3216
|
+
await Fs.copy(tmpProjectPath, projectPath);
|
|
3217
|
+
await Fs.remove(tmpProjectPath);
|
|
3218
|
+
return await this.toProject(projectFile);
|
|
3219
|
+
}
|
|
3220
|
+
/**
|
|
3221
|
+
* Returns a Project by ID
|
|
3222
|
+
*
|
|
3223
|
+
* If a commit hash is provided, the Project is read from history
|
|
3224
|
+
*/
|
|
3225
|
+
async read(props) {
|
|
3226
|
+
readProjectSchema.parse(props);
|
|
3227
|
+
if (!props.commitHash) {
|
|
3228
|
+
const projectFile = await this.jsonFileService.read(pathTo.projectFile(props.id), projectFileSchema);
|
|
3229
|
+
return await this.toProject(projectFile);
|
|
3230
|
+
} else {
|
|
3231
|
+
const projectFile = this.migrate(migrateProjectSchema.parse(JSON.parse(await this.gitService.getFileContentAtCommit(pathTo.project(props.id), pathTo.projectFile(props.id), props.commitHash))));
|
|
3232
|
+
return await this.toProject(projectFile);
|
|
3233
|
+
}
|
|
3234
|
+
}
|
|
3235
|
+
/**
|
|
3236
|
+
* Updates given Project
|
|
3237
|
+
*/
|
|
3238
|
+
async update(props) {
|
|
3239
|
+
updateProjectSchema.parse(props);
|
|
3240
|
+
const projectPath = pathTo.project(props.id);
|
|
3241
|
+
const filePath = pathTo.projectFile(props.id);
|
|
3242
|
+
const projectFile = {
|
|
3243
|
+
...await this.read(props),
|
|
3244
|
+
...props,
|
|
3245
|
+
updated: datetime()
|
|
3246
|
+
};
|
|
3247
|
+
await this.jsonFileService.update(projectFile, filePath, projectFileSchema);
|
|
3248
|
+
await this.gitService.add(projectPath, [filePath]);
|
|
3249
|
+
await this.gitService.commit(projectPath, {
|
|
3250
|
+
method: "update",
|
|
3251
|
+
reference: {
|
|
3252
|
+
objectType: "project",
|
|
3253
|
+
id: projectFile.id
|
|
3254
|
+
}
|
|
3255
|
+
});
|
|
3256
|
+
return await this.toProject(projectFile);
|
|
3257
|
+
}
|
|
3258
|
+
/**
|
|
3259
|
+
* Upgrades given Project to the current version of Core
|
|
3260
|
+
*
|
|
3261
|
+
* Needed when a new Core version is requiring changes to existing files or structure.
|
|
3262
|
+
*/
|
|
3263
|
+
async upgrade(props) {
|
|
3264
|
+
upgradeProjectSchema.parse(props);
|
|
3265
|
+
const projectPath = pathTo.project(props.id);
|
|
3266
|
+
const projectFilePath = pathTo.projectFile(props.id);
|
|
3267
|
+
if (await this.gitService.branches.current(projectPath) !== projectBranchSchema.enum.work) await this.gitService.branches.switch(projectPath, projectBranchSchema.enum.work);
|
|
3268
|
+
const currentProjectFile = migrateProjectSchema.parse(await this.jsonFileService.unsafeRead(projectFilePath));
|
|
3269
|
+
if (Semver.gt(currentProjectFile.coreVersion, this.coreVersion)) throw new ProjectUpgradeError(`The Projects Core version "${currentProjectFile.coreVersion}" is higher than the current Core version "${this.coreVersion}".`);
|
|
3270
|
+
if (Semver.eq(currentProjectFile.coreVersion, this.coreVersion) && props.force !== true) throw new ProjectUpgradeError(`The Projects Core version "${currentProjectFile.coreVersion}" is already up to date.`);
|
|
3271
|
+
const assetReferences = await this.listReferences("asset", props.id);
|
|
3272
|
+
const collectionReferences = await this.listReferences("collection", props.id);
|
|
3273
|
+
this.logService.info({
|
|
3274
|
+
source: "core",
|
|
3275
|
+
message: `Attempting to upgrade Project "${props.id}" from Core version ${currentProjectFile.coreVersion} to ${this.coreVersion}`
|
|
3276
|
+
});
|
|
3277
|
+
const upgradeBranchName = `upgrade/core-${currentProjectFile.coreVersion}-to-${this.coreVersion}`;
|
|
3278
|
+
await this.gitService.branches.switch(projectPath, upgradeBranchName, { isNew: true });
|
|
3279
|
+
try {
|
|
3280
|
+
await Promise.all(assetReferences.map(async (reference) => {
|
|
3281
|
+
await this.upgradeObjectFile(props.id, "asset", reference);
|
|
3282
|
+
}));
|
|
3283
|
+
await Promise.all(collectionReferences.map(async (reference) => {
|
|
3284
|
+
await this.upgradeObjectFile(props.id, "collection", reference);
|
|
3285
|
+
}));
|
|
3286
|
+
await Promise.all(collectionReferences.map(async (collectionReference) => {
|
|
3287
|
+
const entryReferences = await this.listReferences("entry", props.id, collectionReference.id);
|
|
3288
|
+
await Promise.all(entryReferences.map(async (reference) => {
|
|
3289
|
+
await this.upgradeObjectFile(props.id, "entry", reference, collectionReference.id);
|
|
3290
|
+
}));
|
|
3291
|
+
}));
|
|
3292
|
+
const migratedProjectFile = this.migrate(currentProjectFile);
|
|
3293
|
+
await this.update(migratedProjectFile);
|
|
3294
|
+
await this.gitService.branches.switch(projectPath, projectBranchSchema.enum.work);
|
|
3295
|
+
await this.gitService.merge(projectPath, upgradeBranchName, { squash: true });
|
|
3296
|
+
await this.gitService.commit(projectPath, {
|
|
3297
|
+
method: "upgrade",
|
|
3298
|
+
reference: {
|
|
3299
|
+
objectType: "project",
|
|
3300
|
+
id: migratedProjectFile.id
|
|
3301
|
+
}
|
|
3302
|
+
});
|
|
3303
|
+
await this.gitService.tags.create({
|
|
3304
|
+
path: projectPath,
|
|
3305
|
+
message: `Upgraded Project to Core version ${migratedProjectFile.coreVersion}`
|
|
3306
|
+
});
|
|
3307
|
+
await this.gitService.branches.delete(projectPath, upgradeBranchName, true);
|
|
3308
|
+
this.logService.info({
|
|
3309
|
+
source: "core",
|
|
3310
|
+
message: `Successfully upgraded Project "${props.id}" to Core version "${this.coreVersion}"`,
|
|
3311
|
+
meta: {
|
|
3312
|
+
previous: currentProjectFile,
|
|
3313
|
+
migrated: migratedProjectFile
|
|
3314
|
+
}
|
|
3315
|
+
});
|
|
3316
|
+
} catch (error) {
|
|
3317
|
+
await this.gitService.branches.switch(projectPath, projectBranchSchema.enum.work);
|
|
3318
|
+
await this.gitService.branches.delete(projectPath, upgradeBranchName, true);
|
|
3319
|
+
throw error;
|
|
3320
|
+
}
|
|
3321
|
+
}
|
|
3322
|
+
branches = {
|
|
3323
|
+
list: async (props) => {
|
|
3324
|
+
listBranchesProjectSchema.parse(props);
|
|
3325
|
+
const projectPath = pathTo.project(props.id);
|
|
3326
|
+
if (await this.gitService.remotes.hasOrigin(projectPath)) await this.gitService.fetch(projectPath);
|
|
3327
|
+
return await this.gitService.branches.list(projectPath);
|
|
3328
|
+
},
|
|
3329
|
+
current: async (props) => {
|
|
3330
|
+
currentBranchProjectSchema.parse(props);
|
|
3331
|
+
const projectPath = pathTo.project(props.id);
|
|
3332
|
+
return await this.gitService.branches.current(projectPath);
|
|
3333
|
+
},
|
|
3334
|
+
switch: async (props) => {
|
|
3335
|
+
switchBranchProjectSchema.parse(props);
|
|
3336
|
+
const projectPath = pathTo.project(props.id);
|
|
3337
|
+
return await this.gitService.branches.switch(projectPath, props.branch, props.options);
|
|
3338
|
+
}
|
|
3339
|
+
};
|
|
3340
|
+
/**
|
|
3341
|
+
* Updates the remote origin URL of given Project
|
|
3342
|
+
*
|
|
3343
|
+
* @todo maybe add this logic to the update method
|
|
3344
|
+
*/
|
|
3345
|
+
async setRemoteOriginUrl(props) {
|
|
3346
|
+
setRemoteOriginUrlProjectSchema.parse(props);
|
|
3347
|
+
const projectPath = pathTo.project(props.id);
|
|
3348
|
+
if (!await this.gitService.remotes.hasOrigin(projectPath)) await this.gitService.remotes.addOrigin(projectPath, props.url);
|
|
3349
|
+
else await this.gitService.remotes.setOriginUrl(projectPath, props.url);
|
|
3350
|
+
}
|
|
3351
|
+
/**
|
|
3352
|
+
* Returns the differences of the given Projects current branch
|
|
3353
|
+
* between the local and remote `origin` (commits ahead & behind)
|
|
3354
|
+
*
|
|
3355
|
+
* Throws an error if the Project does not have a remote origin.
|
|
3356
|
+
*
|
|
3357
|
+
* - `behind` contains a list of commits on the current branch that are available on the remote `origin` but not yet locally
|
|
3358
|
+
* - `ahead` contains a list of commits on the current branch that are available locally but not yet on the remote `origin`
|
|
3359
|
+
*/
|
|
3360
|
+
async getChanges(props) {
|
|
3361
|
+
getChangesProjectSchema.parse(props);
|
|
3362
|
+
const projectPath = pathTo.project(props.id);
|
|
3363
|
+
if (await this.gitService.remotes.hasOrigin(projectPath) === false) throw new Error(`Project "${props.id}" does not have a remote origin`);
|
|
3364
|
+
const currentBranch = await this.gitService.branches.current(projectPath);
|
|
3365
|
+
await this.gitService.fetch(projectPath);
|
|
3366
|
+
return {
|
|
3367
|
+
behind: await this.gitService.log(projectPath, { between: {
|
|
3368
|
+
from: currentBranch,
|
|
3369
|
+
to: `origin/${currentBranch}`
|
|
3370
|
+
} }),
|
|
3371
|
+
ahead: await this.gitService.log(projectPath, { between: {
|
|
3372
|
+
from: `origin/${currentBranch}`,
|
|
3373
|
+
to: currentBranch
|
|
3374
|
+
} })
|
|
3375
|
+
};
|
|
3376
|
+
}
|
|
3377
|
+
/**
|
|
3378
|
+
* Pulls remote changes of `origin` down to the local repository
|
|
3379
|
+
* and then pushes local commits to the upstream branch
|
|
3380
|
+
*/
|
|
3381
|
+
async synchronize(props) {
|
|
3382
|
+
synchronizeProjectSchema.parse(props);
|
|
3383
|
+
const projectPath = pathTo.project(props.id);
|
|
3384
|
+
await this.gitService.pull(projectPath);
|
|
3385
|
+
await this.gitService.push(projectPath);
|
|
3386
|
+
}
|
|
3387
|
+
/**
|
|
3388
|
+
* Deletes given Project
|
|
3389
|
+
*
|
|
3390
|
+
* Deletes the whole Project folder including the history, not only the config file.
|
|
3391
|
+
* Throws in case a Project is only available locally and could be lost forever,
|
|
3392
|
+
* or changes are not pushed to a remote yet.
|
|
3393
|
+
*/
|
|
3394
|
+
async delete(props) {
|
|
3395
|
+
deleteProjectSchema.parse(props);
|
|
3396
|
+
const hasRemoteOrigin = await this.gitService.remotes.hasOrigin(pathTo.project(props.id));
|
|
3397
|
+
if (hasRemoteOrigin === false && props.force !== true) throw new RemoteOriginMissingError(props.id);
|
|
3398
|
+
if (hasRemoteOrigin === true && props.force !== true) {
|
|
3399
|
+
if ((await this.getChanges({ id: props.id })).ahead.length > 0) throw new SynchronizeLocalChangesError(props.id);
|
|
3400
|
+
}
|
|
3401
|
+
await Fs.remove(pathTo.project(props.id));
|
|
3402
|
+
}
|
|
3403
|
+
/**
|
|
3404
|
+
* Lists outdated Projects that need to be upgraded
|
|
3405
|
+
*/
|
|
3406
|
+
async listOutdated() {
|
|
3407
|
+
const projectReferences = await this.listReferences(objectTypeSchema.enum.project);
|
|
3408
|
+
return (await Promise.all(projectReferences.map(async (reference) => {
|
|
3409
|
+
const json = await this.jsonFileService.unsafeRead(pathTo.projectFile(reference.id));
|
|
3410
|
+
const projectFile = migrateProjectSchema.parse(json);
|
|
3411
|
+
if (projectFile.coreVersion !== this.coreVersion) return projectFile;
|
|
3412
|
+
return null;
|
|
3413
|
+
}))).filter(isNotEmpty);
|
|
3414
|
+
}
|
|
3415
|
+
async list(props) {
|
|
3416
|
+
if (props) listProjectsSchema.parse(props);
|
|
3417
|
+
const offset = props?.offset || 0;
|
|
3418
|
+
const limit = props?.limit ?? 15;
|
|
3419
|
+
const projectReferences = await this.listReferences(objectTypeSchema.enum.project);
|
|
3420
|
+
const partialProjectReferences = limit === 0 ? projectReferences.slice(offset) : projectReferences.slice(offset, offset + limit);
|
|
3421
|
+
const projects = await this.returnResolved(partialProjectReferences.map((reference) => {
|
|
3422
|
+
return this.read({ id: reference.id });
|
|
3423
|
+
}));
|
|
3424
|
+
return {
|
|
3425
|
+
total: projectReferences.length,
|
|
3426
|
+
limit,
|
|
3427
|
+
offset,
|
|
3428
|
+
list: projects
|
|
3429
|
+
};
|
|
3430
|
+
}
|
|
3431
|
+
async count() {
|
|
3432
|
+
return (await this.listReferences(objectTypeSchema.enum.project)).length;
|
|
3433
|
+
}
|
|
3434
|
+
/**
|
|
3435
|
+
* Checks if given object is of type Project
|
|
3436
|
+
*/
|
|
3437
|
+
isProject(obj) {
|
|
3438
|
+
return projectFileSchema.safeParse(obj).success;
|
|
3439
|
+
}
|
|
3440
|
+
/**
|
|
3441
|
+
* Migrates an potentially outdated Project file to the current schema
|
|
3442
|
+
*/
|
|
3443
|
+
migrate(props) {
|
|
3444
|
+
props.coreVersion = this.coreVersion;
|
|
3445
|
+
return projectFileSchema.parse(props);
|
|
3446
|
+
}
|
|
3447
|
+
/**
|
|
3448
|
+
* Creates a Project from given ProjectFile
|
|
3449
|
+
*/
|
|
3450
|
+
async toProject(projectFile) {
|
|
3451
|
+
const projectPath = pathTo.project(projectFile.id);
|
|
3452
|
+
let remoteOriginUrl = null;
|
|
3453
|
+
if (await this.gitService.remotes.hasOrigin(projectPath)) remoteOriginUrl = await this.gitService.remotes.getOriginUrl(projectPath);
|
|
3454
|
+
const fullHistory = await this.gitService.log(pathTo.project(projectFile.id));
|
|
3455
|
+
const history = await this.gitService.log(pathTo.project(projectFile.id), { filePath: pathTo.projectFile(projectFile.id) });
|
|
3456
|
+
return {
|
|
3457
|
+
...projectFile,
|
|
3458
|
+
remoteOriginUrl,
|
|
3459
|
+
history,
|
|
3460
|
+
fullHistory
|
|
3461
|
+
};
|
|
3462
|
+
}
|
|
3463
|
+
/**
|
|
3464
|
+
* Creates the projects folder structure and makes sure to
|
|
3465
|
+
* write empty .gitkeep files inside them to ensure they are
|
|
3466
|
+
* committed
|
|
3467
|
+
*/
|
|
3468
|
+
async createFolderStructure(path) {
|
|
3469
|
+
const folders = Object.values(projectFolderSchema.enum);
|
|
3470
|
+
await Promise.all(folders.map(async (folder) => {
|
|
3471
|
+
await Fs.mkdirp(Path.join(path, folder));
|
|
3472
|
+
await Fs.writeFile(Path.join(path, folder, ".gitkeep"), "");
|
|
3473
|
+
}));
|
|
3474
|
+
}
|
|
3475
|
+
/**
|
|
3476
|
+
* Writes the Projects main .gitignore file to disk
|
|
3477
|
+
*
|
|
3478
|
+
* @todo Add general things to ignore
|
|
3479
|
+
* @see https://github.com/github/gitignore/tree/master/Global
|
|
3480
|
+
*/
|
|
3481
|
+
async createGitignore(path) {
|
|
3482
|
+
await Fs.writeFile(Path.join(path, ".gitignore"), [
|
|
3483
|
+
"# Ignore all hidden files and folders...",
|
|
3484
|
+
".*",
|
|
3485
|
+
"# ...but these",
|
|
3486
|
+
"!/.gitignore",
|
|
3487
|
+
"!/.gitattributes",
|
|
3488
|
+
"!/**/.gitkeep",
|
|
3489
|
+
"",
|
|
3490
|
+
"# elek.io related ignores"
|
|
3491
|
+
].join(Os.EOL));
|
|
3492
|
+
}
|
|
3493
|
+
async upgradeObjectFile(projectId, objectType, reference, collectionId) {
|
|
3494
|
+
switch (objectType) {
|
|
3495
|
+
case "asset": {
|
|
3496
|
+
const assetFilePath = pathTo.assetFile(projectId, reference.id);
|
|
3497
|
+
const prevAssetFile = await this.jsonFileService.unsafeRead(assetFilePath);
|
|
3498
|
+
const migratedAssetFile = this.assetService.migrate(prevAssetFile);
|
|
3499
|
+
await this.assetService.update({
|
|
3500
|
+
projectId,
|
|
3501
|
+
...migratedAssetFile
|
|
3502
|
+
});
|
|
3503
|
+
this.logService.info({
|
|
3504
|
+
source: "core",
|
|
3505
|
+
message: `Upgraded ${objectType} "${assetFilePath}"`,
|
|
3506
|
+
meta: {
|
|
3507
|
+
previous: prevAssetFile,
|
|
3508
|
+
migrated: migratedAssetFile
|
|
3509
|
+
}
|
|
3510
|
+
});
|
|
3511
|
+
return;
|
|
3512
|
+
}
|
|
3513
|
+
case "collection": {
|
|
3514
|
+
const collectionFilePath = pathTo.collectionFile(projectId, reference.id);
|
|
3515
|
+
const prevCollectionFile = await this.jsonFileService.unsafeRead(collectionFilePath);
|
|
3516
|
+
const migratedCollectionFile = this.collectionService.migrate(prevCollectionFile);
|
|
3517
|
+
await this.collectionService.update({
|
|
3518
|
+
projectId,
|
|
3519
|
+
...migratedCollectionFile
|
|
3520
|
+
});
|
|
3521
|
+
this.logService.info({
|
|
3522
|
+
source: "core",
|
|
3523
|
+
message: `Upgraded ${objectType} "${collectionFilePath}"`,
|
|
3524
|
+
meta: {
|
|
3525
|
+
previous: prevCollectionFile,
|
|
3526
|
+
migrated: migratedCollectionFile
|
|
3527
|
+
}
|
|
3528
|
+
});
|
|
3529
|
+
return;
|
|
3530
|
+
}
|
|
3531
|
+
case "entry": {
|
|
3532
|
+
if (!collectionId) throw new RequiredParameterMissingError("collectionId");
|
|
3533
|
+
const entryFilePath = pathTo.entryFile(projectId, collectionId, reference.id);
|
|
3534
|
+
const prevEntryFile = await this.jsonFileService.unsafeRead(entryFilePath);
|
|
3535
|
+
const migratedEntryFile = this.entryService.migrate(prevEntryFile);
|
|
3536
|
+
await this.entryService.update({
|
|
3537
|
+
projectId,
|
|
3538
|
+
collectionId,
|
|
3539
|
+
...migratedEntryFile
|
|
3540
|
+
});
|
|
3541
|
+
this.logService.info({
|
|
3542
|
+
source: "core",
|
|
3543
|
+
message: `Upgraded ${objectType} "${entryFilePath}"`,
|
|
3544
|
+
meta: {
|
|
3545
|
+
previous: prevEntryFile,
|
|
3546
|
+
migrated: migratedEntryFile
|
|
3547
|
+
}
|
|
3548
|
+
});
|
|
3549
|
+
return;
|
|
3550
|
+
}
|
|
3551
|
+
default: throw new Error(`Trying to upgrade unsupported object file of type "${objectType}"`);
|
|
3552
|
+
}
|
|
3553
|
+
}
|
|
3554
|
+
};
|
|
3555
|
+
|
|
3556
|
+
//#endregion
|
|
3557
|
+
//#region src/service/UserService.ts
|
|
3558
|
+
/**
|
|
3559
|
+
* Service to handle the User that is currently working with Core
|
|
3560
|
+
*/
|
|
3561
|
+
var UserService = class {
|
|
3562
|
+
logService;
|
|
3563
|
+
jsonFileService;
|
|
3564
|
+
constructor(logService, jsonFileService) {
|
|
3565
|
+
this.logService = logService;
|
|
3566
|
+
this.jsonFileService = jsonFileService;
|
|
3567
|
+
}
|
|
3568
|
+
/**
|
|
3569
|
+
* Returns the User currently working with Core
|
|
3570
|
+
*/
|
|
3571
|
+
async get() {
|
|
3572
|
+
try {
|
|
3573
|
+
return await this.jsonFileService.read(pathTo.userFile, userFileSchema);
|
|
3574
|
+
} catch {
|
|
3575
|
+
this.logService.info({
|
|
3576
|
+
source: "core",
|
|
3577
|
+
message: "No User found"
|
|
3578
|
+
});
|
|
3579
|
+
return null;
|
|
3580
|
+
}
|
|
3581
|
+
}
|
|
3582
|
+
/**
|
|
3583
|
+
* Sets the User currently working with Core
|
|
3584
|
+
*
|
|
3585
|
+
* By doing so all git operations are done with the signature of this User
|
|
3586
|
+
*/
|
|
3587
|
+
async set(props) {
|
|
3588
|
+
setUserSchema.parse(props);
|
|
3589
|
+
const userFilePath = pathTo.userFile;
|
|
3590
|
+
const userFile = { ...props };
|
|
3591
|
+
if (userFile.userType === UserTypeSchema.enum.cloud) {}
|
|
3592
|
+
await this.jsonFileService.update(userFile, userFilePath, userFileSchema);
|
|
3593
|
+
this.logService.debug({
|
|
3594
|
+
source: "core",
|
|
3595
|
+
message: "Updated User"
|
|
3596
|
+
});
|
|
3597
|
+
return userFile;
|
|
3598
|
+
}
|
|
3599
|
+
};
|
|
3600
|
+
|
|
3601
|
+
//#endregion
|
|
3602
|
+
//#region src/index.node.ts
|
|
3603
|
+
/**
|
|
3604
|
+
* elek.io Core
|
|
3605
|
+
*
|
|
3606
|
+
* Provides access to all services Core is offering
|
|
3607
|
+
*/
|
|
3608
|
+
var ElekIoCore = class {
|
|
3609
|
+
coreVersion;
|
|
3610
|
+
options;
|
|
3611
|
+
logService;
|
|
3612
|
+
userService;
|
|
3613
|
+
gitService;
|
|
3614
|
+
jsonFileService;
|
|
3615
|
+
assetService;
|
|
3616
|
+
projectService;
|
|
3617
|
+
collectionService;
|
|
3618
|
+
entryService;
|
|
3619
|
+
localApi;
|
|
3620
|
+
constructor(props) {
|
|
3621
|
+
this.coreVersion = package_default.version;
|
|
3622
|
+
const parsedProps = constructorElekIoCoreSchema.parse(props);
|
|
3623
|
+
this.options = Object.assign({}, {
|
|
3624
|
+
log: { level: "info" },
|
|
3625
|
+
file: { cache: true }
|
|
3626
|
+
}, parsedProps);
|
|
3627
|
+
this.logService = new LogService(this.options);
|
|
3628
|
+
this.jsonFileService = new JsonFileService(this.options, this.logService);
|
|
3629
|
+
this.userService = new UserService(this.logService, this.jsonFileService);
|
|
3630
|
+
this.gitService = new GitService(this.options, this.logService, this.userService);
|
|
3631
|
+
this.assetService = new AssetService(this.options, this.logService, this.jsonFileService, this.gitService);
|
|
3632
|
+
this.collectionService = new CollectionService(this.options, this.logService, this.jsonFileService, this.gitService);
|
|
3633
|
+
this.entryService = new EntryService(this.options, this.logService, this.jsonFileService, this.gitService, this.collectionService);
|
|
3634
|
+
this.projectService = new ProjectService(this.coreVersion, this.options, this.logService, this.jsonFileService, this.gitService, this.assetService, this.collectionService, this.entryService);
|
|
3635
|
+
this.localApi = new LocalApi(this.logService, this.projectService, this.collectionService, this.entryService, this.assetService);
|
|
3636
|
+
this.logService.info({
|
|
3637
|
+
source: "core",
|
|
3638
|
+
message: `Initializing elek.io Core ${this.coreVersion}`,
|
|
3639
|
+
meta: { options: this.options }
|
|
3640
|
+
});
|
|
3641
|
+
Fs.mkdirpSync(pathTo.projects);
|
|
3642
|
+
Fs.mkdirpSync(pathTo.tmp);
|
|
3643
|
+
Fs.emptyDirSync(pathTo.tmp);
|
|
3644
|
+
}
|
|
3645
|
+
/**
|
|
3646
|
+
* Exposes the logger
|
|
3647
|
+
*/
|
|
3648
|
+
get logger() {
|
|
3649
|
+
return this.logService;
|
|
3650
|
+
}
|
|
3651
|
+
/**
|
|
3652
|
+
* Utility / helper functions
|
|
3653
|
+
*/
|
|
3654
|
+
get util() {
|
|
3655
|
+
return node_exports;
|
|
3656
|
+
}
|
|
3657
|
+
/**
|
|
3658
|
+
* Exposes git functions
|
|
3659
|
+
*/
|
|
3660
|
+
get git() {
|
|
3661
|
+
return this.gitService;
|
|
3662
|
+
}
|
|
3663
|
+
/**
|
|
3664
|
+
* Getter and setter methods for the User currently working with Core
|
|
3665
|
+
*/
|
|
3666
|
+
get user() {
|
|
3667
|
+
return this.userService;
|
|
3668
|
+
}
|
|
3669
|
+
/**
|
|
3670
|
+
* CRUD methods to work with Projects
|
|
3671
|
+
*/
|
|
3672
|
+
get projects() {
|
|
3673
|
+
return this.projectService;
|
|
3674
|
+
}
|
|
3675
|
+
/**
|
|
3676
|
+
* CRUD methods to work with Assets
|
|
3677
|
+
*/
|
|
3678
|
+
get assets() {
|
|
3679
|
+
return this.assetService;
|
|
3680
|
+
}
|
|
3681
|
+
/**
|
|
3682
|
+
* CRUD methods to work with Collections
|
|
3683
|
+
*/
|
|
3684
|
+
get collections() {
|
|
3685
|
+
return this.collectionService;
|
|
3686
|
+
}
|
|
3687
|
+
/**
|
|
3688
|
+
* CRUD methods to work with Entries
|
|
3689
|
+
*/
|
|
3690
|
+
get entries() {
|
|
3691
|
+
return this.entryService;
|
|
3692
|
+
}
|
|
3693
|
+
/**
|
|
3694
|
+
* Allows starting and stopping a REST API
|
|
3695
|
+
* to allow developers to read local Project data
|
|
3696
|
+
*/
|
|
3697
|
+
get api() {
|
|
3698
|
+
return this.localApi;
|
|
3699
|
+
}
|
|
3700
|
+
};
|
|
3701
|
+
|
|
3702
|
+
//#endregion
|
|
3703
|
+
//#region src/astro/schema.ts
|
|
3704
|
+
/**
|
|
3705
|
+
* Generates a flat Zod object schema from collection field definitions
|
|
3706
|
+
* for use with Astro's `parseData` validation.
|
|
3707
|
+
*
|
|
3708
|
+
* Each key is the field definition ID (UUID) and each value schema
|
|
3709
|
+
* is the translatable content schema for that field type.
|
|
3710
|
+
*/
|
|
3711
|
+
function buildEntryValuesSchema(fieldDefinitions) {
|
|
3712
|
+
const shape = {};
|
|
3713
|
+
for (const fieldDef of fieldDefinitions) switch (fieldDef.valueType) {
|
|
3714
|
+
case ValueTypeSchema.enum.string:
|
|
3715
|
+
shape[fieldDef.id] = getTranslatableStringValueContentSchemaFromFieldDefinition(fieldDef);
|
|
3716
|
+
break;
|
|
3717
|
+
case ValueTypeSchema.enum.number:
|
|
3718
|
+
shape[fieldDef.id] = getTranslatableNumberValueContentSchemaFromFieldDefinition(fieldDef);
|
|
3719
|
+
break;
|
|
3720
|
+
case ValueTypeSchema.enum.boolean:
|
|
3721
|
+
shape[fieldDef.id] = getTranslatableBooleanValueContentSchemaFromFieldDefinition();
|
|
3722
|
+
break;
|
|
3723
|
+
case ValueTypeSchema.enum.reference:
|
|
3724
|
+
shape[fieldDef.id] = getTranslatableReferenceValueContentSchemaFromFieldDefinition(fieldDef);
|
|
3725
|
+
break;
|
|
3726
|
+
}
|
|
3727
|
+
return z.object(shape);
|
|
3728
|
+
}
|
|
3729
|
+
|
|
3730
|
+
//#endregion
|
|
3731
|
+
//#region src/astro/transform.ts
|
|
3732
|
+
/**
|
|
3733
|
+
* Transforms an elek.io Entry's values array into a flat object
|
|
3734
|
+
* keyed by field definition ID. Each value's translatable content
|
|
3735
|
+
* is preserved as-is.
|
|
3736
|
+
*/
|
|
3737
|
+
function transformEntryValues(values) {
|
|
3738
|
+
const result = {};
|
|
3739
|
+
for (const value of values) result[value.fieldDefinitionId] = value.content;
|
|
3740
|
+
return result;
|
|
3741
|
+
}
|
|
3742
|
+
|
|
3743
|
+
//#endregion
|
|
3744
|
+
//#region src/index.astro.ts
|
|
3745
|
+
const core = new ElekIoCore({ log: { level: "info" } });
|
|
3746
|
+
/**
 * Astro content loader for elek.io Assets.
 *
 * Downloads every Asset of the given Project into `props.outDir` and
 * registers its metadata (plus the resolved `absolutePath`) in Astro's
 * content collection store.
 *
 * @example
 * ```ts
 * // src/content.config.ts
 * import { defineCollection } from 'astro:content';
 * import { elekAssets } from '@elek-io/core/astro';
 *
 * export const collections = {
 *   assets: defineCollection({
 *     loader: elekAssets({
 *       projectId: 'abc-123-...',
 *       outDir: './content/assets',
 *     }),
 *   }),
 * };
 * ```
 */
function elekAssets(props) {
	return {
		name: "elek-assets",
		// Assets all share one static schema.
		schema: () => assetSchema,
		load: async (context) => {
			context.logger.info(`Loading elek.io Assets for Project "${props.projectId}", saving to "${props.outDir}"`);
			// Drop previously stored entries so deletions are reflected.
			context.store.clear();
			const result = await core.assets.list({
				projectId: props.projectId,
				limit: 0,
			});
			if (result.total === 0) {
				context.logger.warn("No Assets found");
			} else {
				context.logger.info(`Found ${result.total} Assets`);
			}
			for (const asset of result.list) {
				// The binary is written to outDir as "<id>.<extension>".
				const targetPath = Path.resolve(Path.join(props.outDir, `${asset.id}.${asset.extension}`));
				await Fs.ensureDir(Path.dirname(targetPath));
				await core.assets.save({
					projectId: props.projectId,
					id: asset.id,
					filePath: targetPath,
				});
				// Validate against the schema before exposing it to Astro.
				const data = await context.parseData({
					id: asset.id,
					data: {
						...asset,
						absolutePath: targetPath,
					},
				});
				context.store.set({
					id: asset.id,
					data,
				});
			}
			context.logger.info("Finished loading Assets");
		},
	};
}
|
|
3805
|
+
/**
 * Astro content loader for elek.io Collection Entries.
 *
 * Fetches every Entry of the given Collection and registers its
 * transformed field values in Astro's content collection store.
 *
 * @example
 * ```ts
 * // src/content.config.ts
 * import { defineCollection } from 'astro:content';
 * import { elekEntries } from '@elek-io/core/astro';
 *
 * export const collections = {
 *   entries: defineCollection({
 *     loader: elekEntries({
 *       projectId: 'abc-123-...',
 *       collectionId: 'def-456-...',
 *     }),
 *   }),
 * };
 * ```
 */
function elekEntries(props) {
	return {
		name: "elek-entries",
		// The schema is derived from the Collection's field definitions,
		// so it must be resolved lazily and asynchronously.
		schema: async () => {
			const collection = await core.collections.read({
				projectId: props.projectId,
				id: props.collectionId,
			});
			return buildEntryValuesSchema(collection.fieldDefinitions);
		},
		load: async (context) => {
			context.logger.info(`Loading elek.io Entries of Collection "${props.collectionId}" and Project "${props.projectId}"`);
			// Drop previously stored entries so deletions are reflected.
			context.store.clear();
			const result = await core.entries.list({
				projectId: props.projectId,
				collectionId: props.collectionId,
				limit: 0,
			});
			if (result.total === 0) {
				context.logger.warn("No Entries found");
			} else {
				context.logger.info(`Found ${result.total} Entries`);
			}
			for (const entry of result.list) {
				// Flatten the Entry's value list into a schema-shaped object
				// and validate it before exposing it to Astro.
				const data = await context.parseData({
					id: entry.id,
					data: transformEntryValues(entry.values),
				});
				context.store.set({
					id: entry.id,
					data,
				});
			}
			context.logger.info("Finished loading Entries");
		},
	};
}
|
|
3861
|
+
|
|
3862
|
+
//#endregion
// Public module surface: the two Astro content-collection loaders.
export { elekAssets, elekEntries };
//# sourceMappingURL=index.astro.mjs.map
|