storybooker 0.19.3 → 0.22.0-canary.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +40 -18
- package/dist/adapters/_internal/queue.d.mts +127 -0
- package/dist/aws-dynamodb.d.mts +22 -0
- package/dist/aws-dynamodb.mjs +118 -0
- package/dist/aws-dynamodb.mjs.map +1 -0
- package/dist/aws-s3.d.mts +20 -0
- package/dist/aws-s3.mjs +96 -0
- package/dist/aws-s3.mjs.map +1 -0
- package/dist/azure-blob-storage.d.mts +20 -0
- package/dist/azure-blob-storage.mjs +126 -0
- package/dist/azure-blob-storage.mjs.map +1 -0
- package/dist/azure-cosmos-db.d.mts +23 -0
- package/dist/azure-cosmos-db.mjs +87 -0
- package/dist/azure-cosmos-db.mjs.map +1 -0
- package/dist/azure-data-tables.d.mts +23 -0
- package/dist/azure-data-tables.mjs +127 -0
- package/dist/azure-data-tables.mjs.map +1 -0
- package/dist/azure-easy-auth.d.mts +50 -0
- package/dist/azure-easy-auth.mjs +88 -0
- package/dist/azure-easy-auth.mjs.map +1 -0
- package/dist/azure-functions.d.mts +62 -0
- package/dist/azure-functions.mjs +147 -0
- package/dist/azure-functions.mjs.map +1 -0
- package/dist/fs.d.mts +37 -0
- package/dist/fs.mjs +240 -0
- package/dist/fs.mjs.map +1 -0
- package/dist/gcp-big-table.d.mts +23 -0
- package/dist/gcp-big-table.mjs +92 -0
- package/dist/gcp-big-table.mjs.map +1 -0
- package/dist/gcp-firestore.d.mts +22 -0
- package/dist/gcp-firestore.mjs +87 -0
- package/dist/gcp-firestore.mjs.map +1 -0
- package/dist/gcp-storage.d.mts +20 -0
- package/dist/gcp-storage.mjs +96 -0
- package/dist/gcp-storage.mjs.map +1 -0
- package/dist/handlers/handle-process-zip.mjs +90 -0
- package/dist/handlers/handle-process-zip.mjs.map +1 -0
- package/dist/handlers/handle-purge.d.mts +12 -0
- package/dist/handlers/handle-purge.mjs +36 -0
- package/dist/handlers/handle-purge.mjs.map +1 -0
- package/dist/handlers/handle-serve-storybook.mjs +94 -0
- package/dist/handlers/handle-serve-storybook.mjs.map +1 -0
- package/dist/index.d.mts +28 -0
- package/dist/index.mjs +62 -0
- package/dist/index.mjs.map +1 -0
- package/dist/models/builds-model.mjs +248 -0
- package/dist/models/builds-model.mjs.map +1 -0
- package/dist/models/builds-schema.d.mts +171 -0
- package/dist/models/builds-schema.mjs +67 -0
- package/dist/models/builds-schema.mjs.map +1 -0
- package/dist/models/projects-model.mjs +122 -0
- package/dist/models/projects-model.mjs.map +1 -0
- package/dist/models/projects-schema.d.mts +70 -0
- package/dist/models/projects-schema.mjs +37 -0
- package/dist/models/projects-schema.mjs.map +1 -0
- package/dist/models/tags-model.mjs +110 -0
- package/dist/models/tags-model.mjs.map +1 -0
- package/dist/models/tags-schema.d.mts +76 -0
- package/dist/models/tags-schema.mjs +34 -0
- package/dist/models/tags-schema.mjs.map +1 -0
- package/dist/models/~model.mjs +43 -0
- package/dist/models/~model.mjs.map +1 -0
- package/dist/models/~shared-schema.d.mts +1 -0
- package/dist/models/~shared-schema.mjs +20 -0
- package/dist/models/~shared-schema.mjs.map +1 -0
- package/dist/mysql.d.mts +39 -0
- package/dist/mysql.mjs +151 -0
- package/dist/mysql.mjs.map +1 -0
- package/dist/redis.d.mts +33 -0
- package/dist/redis.mjs +118 -0
- package/dist/redis.mjs.map +1 -0
- package/dist/routers/account-router.mjs +91 -0
- package/dist/routers/account-router.mjs.map +1 -0
- package/dist/routers/builds-router.mjs +347 -0
- package/dist/routers/builds-router.mjs.map +1 -0
- package/dist/routers/projects-router.mjs +236 -0
- package/dist/routers/projects-router.mjs.map +1 -0
- package/dist/routers/root-router.mjs +108 -0
- package/dist/routers/root-router.mjs.map +1 -0
- package/dist/routers/tags-router.mjs +269 -0
- package/dist/routers/tags-router.mjs.map +1 -0
- package/dist/routers/tasks-router.mjs +71 -0
- package/dist/routers/tasks-router.mjs.map +1 -0
- package/dist/urls.d.mts +47 -0
- package/dist/urls.mjs +208 -0
- package/dist/urls.mjs.map +1 -0
- package/dist/utils/adapter-utils.d.mts +14 -0
- package/dist/utils/adapter-utils.mjs +14 -0
- package/dist/utils/adapter-utils.mjs.map +1 -0
- package/dist/utils/auth.mjs +25 -0
- package/dist/utils/auth.mjs.map +1 -0
- package/dist/utils/error.d.mts +21 -0
- package/dist/utils/error.mjs +109 -0
- package/dist/utils/error.mjs.map +1 -0
- package/dist/utils/file-utils.mjs +16 -0
- package/dist/utils/file-utils.mjs.map +1 -0
- package/dist/utils/openapi-utils.mjs +45 -0
- package/dist/utils/openapi-utils.mjs.map +1 -0
- package/dist/utils/request.mjs +35 -0
- package/dist/utils/request.mjs.map +1 -0
- package/dist/utils/response.mjs +24 -0
- package/dist/utils/response.mjs.map +1 -0
- package/dist/utils/store.mjs +54 -0
- package/dist/utils/store.mjs.map +1 -0
- package/dist/utils/ui-utils.mjs +38 -0
- package/dist/utils/ui-utils.mjs.map +1 -0
- package/dist/utils/url-utils.d.mts +10 -0
- package/dist/utils/url-utils.mjs +54 -0
- package/dist/utils/url-utils.mjs.map +1 -0
- package/dist/~internal/adapter/auth.d.mts +123 -0
- package/dist/~internal/adapter/auth.mjs +20 -0
- package/dist/~internal/adapter/auth.mjs.map +1 -0
- package/dist/~internal/adapter/database.d.mts +240 -0
- package/dist/~internal/adapter/database.mjs +63 -0
- package/dist/~internal/adapter/database.mjs.map +1 -0
- package/dist/~internal/adapter/logger.d.mts +34 -0
- package/dist/~internal/adapter/logger.mjs +13 -0
- package/dist/~internal/adapter/logger.mjs.map +1 -0
- package/dist/~internal/adapter/storage.d.mts +208 -0
- package/dist/~internal/adapter/storage.mjs +63 -0
- package/dist/~internal/adapter/storage.mjs.map +1 -0
- package/dist/~internal/adapter/ui.d.mts +109 -0
- package/dist/~internal/adapter/ui.mjs +1 -0
- package/dist/~internal/adapter.d.mts +8 -0
- package/dist/~internal/adapter.mjs +6 -0
- package/dist/~internal/constants.d.mts +24 -0
- package/dist/~internal/constants.mjs +32 -0
- package/dist/~internal/constants.mjs.map +1 -0
- package/dist/~internal/mimes.d.mts +449 -0
- package/dist/~internal/mimes.mjs +454 -0
- package/dist/~internal/mimes.mjs.map +1 -0
- package/dist/~internal/router.d.mts +1651 -0
- package/dist/~internal/router.mjs +39 -0
- package/dist/~internal/router.mjs.map +1 -0
- package/dist/~internal/types.d.mts +77 -0
- package/dist/~internal/types.mjs +1 -0
- package/dist/~internal/utils.d.mts +4 -0
- package/dist/~internal/utils.mjs +5 -0
- package/openapi.json +3162 -0
- package/package.json +148 -27
- package/src/adapters/_internal/auth.ts +135 -0
- package/src/adapters/_internal/database.ts +241 -0
- package/src/adapters/_internal/index.ts +8 -0
- package/src/adapters/_internal/logger.ts +41 -0
- package/src/adapters/_internal/queue.ts +151 -0
- package/src/adapters/_internal/storage.ts +197 -0
- package/src/adapters/_internal/ui.ts +103 -0
- package/src/adapters/aws-dynamodb.ts +201 -0
- package/src/adapters/aws-s3.ts +160 -0
- package/src/adapters/azure-blob-storage.ts +223 -0
- package/src/adapters/azure-cosmos-db.ts +158 -0
- package/src/adapters/azure-data-tables.ts +223 -0
- package/src/adapters/azure-easy-auth.ts +174 -0
- package/src/adapters/azure-functions.ts +242 -0
- package/src/adapters/fs.ts +398 -0
- package/src/adapters/gcp-big-table.ts +157 -0
- package/src/adapters/gcp-firestore.ts +146 -0
- package/src/adapters/gcp-storage.ts +141 -0
- package/src/adapters/mysql.ts +296 -0
- package/src/adapters/redis.ts +242 -0
- package/src/handlers/handle-process-zip.ts +117 -0
- package/src/handlers/handle-purge.ts +65 -0
- package/src/handlers/handle-serve-storybook.ts +101 -0
- package/src/index.ts +81 -16
- package/src/mocks/mock-auth-service.ts +51 -0
- package/src/mocks/mock-store.ts +26 -0
- package/src/models/builds-model.ts +373 -0
- package/src/models/builds-schema.ts +84 -0
- package/src/models/projects-model.ts +177 -0
- package/src/models/projects-schema.ts +69 -0
- package/src/models/tags-model.ts +138 -0
- package/src/models/tags-schema.ts +45 -0
- package/src/models/~model.ts +79 -0
- package/src/models/~shared-schema.ts +14 -0
- package/src/routers/_app-router.ts +57 -0
- package/src/routers/account-router.ts +136 -0
- package/src/routers/builds-router.ts +464 -0
- package/src/routers/projects-router.ts +309 -0
- package/src/routers/root-router.ts +127 -0
- package/src/routers/tags-router.ts +339 -0
- package/src/routers/tasks-router.ts +75 -0
- package/src/types.ts +107 -0
- package/src/urls.ts +327 -0
- package/src/utils/adapter-utils.ts +26 -0
- package/src/utils/auth.test.ts +71 -0
- package/src/utils/auth.ts +39 -0
- package/src/utils/constants.ts +31 -0
- package/src/utils/date-utils.ts +10 -0
- package/src/utils/error.test.ts +86 -0
- package/src/utils/error.ts +140 -0
- package/src/utils/file-utils.test.ts +65 -0
- package/src/utils/file-utils.ts +43 -0
- package/src/utils/index.ts +3 -0
- package/src/utils/mime-utils.ts +457 -0
- package/src/utils/openapi-utils.ts +49 -0
- package/src/utils/request.ts +97 -0
- package/src/utils/response.ts +20 -0
- package/src/utils/store.ts +85 -0
- package/src/utils/story-utils.ts +42 -0
- package/src/utils/text-utils.ts +10 -0
- package/src/utils/ui-utils.ts +57 -0
- package/src/utils/url-utils.ts +113 -0
- package/dist/index.js +0 -554
- package/src/commands/create.ts +0 -263
- package/src/commands/purge.ts +0 -70
- package/src/commands/test.ts +0 -42
- package/src/service-schema.d.ts +0 -2023
- package/src/utils/auth-utils.ts +0 -31
- package/src/utils/pkg-utils.ts +0 -37
- package/src/utils/sb-build.ts +0 -55
- package/src/utils/sb-test.ts +0 -115
- package/src/utils/schema-utils.ts +0 -123
- package/src/utils/stream-utils.ts +0 -72
- package/src/utils/types.ts +0 -4
- package/src/utils/zip.ts +0 -77
package/dist/gcp-firestore.mjs
ADDED

@@ -0,0 +1,87 @@
```js
import { DatabaseAdapterErrors } from "./~internal/adapter/database.mjs";

//#region src/adapters/gcp-firestore.ts
var GcpFirestoreDatabaseAdapter = class {
  #instance;
  constructor(instance) {
    this.#instance = instance;
  }
  metadata = { name: "Google Cloud Firestore" };
  listCollections = async (_options) => {
    try {
      return (await this.#instance.listCollections()).map((col) => col.id);
    } catch (error) {
      throw new DatabaseAdapterErrors.DatabaseNotInitializedError(error);
    }
  };
  createCollection = async (_collectionId, _options) => {};
  hasCollection = async (collectionId, _options) => {
    return !(await this.#instance.collection(collectionId).limit(1).get()).empty;
  };
  deleteCollection = async (collectionId, _options) => {
    try {
      const snapshot = await this.#instance.collection(collectionId).get();
      if (snapshot.empty) return;
      const batch = this.#instance.batch();
      for (const doc of snapshot.docs) batch.delete(doc.ref);
      await batch.commit();
    } catch (error) {
      throw new DatabaseAdapterErrors.CollectionDoesNotExistError(collectionId, error);
    }
  };
  listDocuments = async (collectionId, _listOptions, _options) => {
    try {
      const snapshot = await this.#instance.collection(collectionId).get();
      const list = [];
      for (const doc of snapshot.docs) {
        const data = doc.data();
        list.push({
          ...data,
          id: doc.id
        });
      }
      return list;
    } catch (error) {
      throw new DatabaseAdapterErrors.CollectionDoesNotExistError(collectionId, error);
    }
  };
  getDocument = async (collectionId, documentId, _options) => {
    const doc = await this.#instance.collection(collectionId).doc(documentId).get();
    if (!doc.exists) throw new DatabaseAdapterErrors.DocumentDoesNotExistError(collectionId, documentId);
    return {
      ...doc.data(),
      id: doc.id
    };
  };
  createDocument = async (collectionId, documentData, _options) => {
    try {
      await this.#instance.collection(collectionId).doc(documentData.id).create(documentData);
    } catch (error) {
      throw new DatabaseAdapterErrors.DocumentAlreadyExistsError(collectionId, documentData.id, error);
    }
  };
  hasDocument = async (collectionId, documentId, _options) => {
    return (await this.#instance.collection(collectionId).doc(documentId).get()).exists;
  };
  deleteDocument = async (collectionId, documentId, _options) => {
    try {
      await this.#instance.collection(collectionId).doc(documentId).delete();
    } catch (error) {
      throw new DatabaseAdapterErrors.DocumentDoesNotExistError(collectionId, documentId, error);
    }
  };
  updateDocument = async (collectionId, documentId, documentData) => {
    try {
      await this.#instance.collection(collectionId).doc(documentId).set(documentData, {
        merge: true,
        mergeFields: Object.keys(documentData)
      });
    } catch (error) {
      throw new DatabaseAdapterErrors.DocumentDoesNotExistError(collectionId, documentId, error);
    }
  };
};

//#endregion
export { GcpFirestoreDatabaseAdapter };
//# sourceMappingURL=gcp-firestore.mjs.map
```
package/dist/gcp-firestore.mjs.map
ADDED

@@ -0,0 +1 @@
+{"version":3,"file":"gcp-firestore.mjs","sources":["../src/adapters/gcp-firestore.ts"],…}
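For orientation, a minimal usage sketch of this adapter. The `storybooker/gcp-firestore` import specifier is an assumption inferred from the dist filename; the real subpath exports live in package.json.

```js
import { Firestore } from "@google-cloud/firestore";
import { GcpFirestoreDatabaseAdapter } from "storybooker/gcp-firestore"; // assumed specifier

const db = new GcpFirestoreDatabaseAdapter(new Firestore());
// createCollection is a deliberate no-op above: Firestore creates
// collections implicitly when the first document is written.
await db.createDocument("projects", { id: "my-project", name: "Demo" }, {});
const project = await db.getDocument("projects", "my-project", {});
```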
package/dist/gcp-storage.d.mts
ADDED

@@ -0,0 +1,20 @@
```ts
import { StorageAdapter } from "./~internal/adapter/storage.mjs";
import { Storage } from "@google-cloud/storage";

//#region src/adapters/gcp-storage.d.ts
declare class GcpGcsStorageService implements StorageAdapter {
  #private;
  constructor(client: Storage);
  metadata: StorageAdapter["metadata"];
  createContainer: StorageAdapter["createContainer"];
  deleteContainer: StorageAdapter["deleteContainer"];
  hasContainer: StorageAdapter["hasContainer"];
  listContainers: StorageAdapter["listContainers"];
  deleteFiles: StorageAdapter["deleteFiles"];
  uploadFiles: StorageAdapter["uploadFiles"];
  hasFile: StorageAdapter["hasFile"];
  downloadFile: StorageAdapter["downloadFile"];
}
//#endregion
export { GcpGcsStorageService };
//# sourceMappingURL=gcp-storage.d.mts.map
```
package/dist/gcp-storage.mjs
ADDED

@@ -0,0 +1,96 @@
```js
import { StorageAdapterErrors } from "./~internal/adapter/storage.mjs";
import { Buffer } from "node:buffer";
import { Readable } from "node:stream";

//#region src/adapters/gcp-storage.ts
var GcpGcsStorageService = class {
  #client;
  constructor(client) {
    this.#client = client;
  }
  metadata = { name: "Google Cloud Storage" };
  createContainer = async (containerId, _options) => {
    try {
      const bucketName = genBucketNameFromContainerId(containerId);
      await this.#client.createBucket(bucketName, {});
    } catch (error) {
      throw new StorageAdapterErrors.ContainerAlreadyExistsError(containerId, error);
    }
  };
  deleteContainer = async (containerId, _options) => {
    try {
      const bucketName = genBucketNameFromContainerId(containerId);
      await this.#client.bucket(bucketName).delete();
    } catch (error) {
      throw new StorageAdapterErrors.ContainerDoesNotExistError(containerId, error);
    }
  };
  hasContainer = async (containerId, _options) => {
    const bucketName = genBucketNameFromContainerId(containerId);
    const [exists] = await this.#client.bucket(bucketName).exists();
    return exists;
  };
  listContainers = async (_options) => {
    const [buckets] = await this.#client.getBuckets();
    return buckets.map((bucket) => bucket.name);
  };
  deleteFiles = async (containerId, filePathsOrPrefix, _options) => {
    const bucketName = genBucketNameFromContainerId(containerId);
    const bucket = this.#client.bucket(bucketName);
    if (typeof filePathsOrPrefix === "string") await bucket.deleteFiles({ prefix: filePathsOrPrefix });
    else await Promise.all(filePathsOrPrefix.map(async (filepath) => await bucket.file(filepath).delete({ ignoreNotFound: true })));
  };
  uploadFiles = async (containerId, files, _options) => {
    const bucketName = genBucketNameFromContainerId(containerId);
    const bucket = this.#client.bucket(bucketName);
    await Promise.allSettled(files.map(async ({ content, path, mimeType }) => {
      await uploadFileToGcs(bucket.file(path), content, mimeType);
    }));
  };
  hasFile = async (containerId, filepath, _options) => {
    const bucketName = genBucketNameFromContainerId(containerId);
    const [exists] = await this.#client.bucket(bucketName).file(filepath).exists();
    return exists;
  };
  downloadFile = async (containerId, filepath, _options) => {
    const bucketName = genBucketNameFromContainerId(containerId);
    const file = this.#client.bucket(bucketName).file(filepath);
    const [exists] = await file.exists();
    if (!exists) throw new StorageAdapterErrors.FileDoesNotExistError(containerId, filepath);
    const [metadata] = await file.getMetadata();
    const mimeType = metadata.contentType;
    const readable = file.createReadStream();
    return {
      content: Readable.toWeb(readable),
      mimeType,
      path: filepath
    };
  };
};
function genBucketNameFromContainerId(containerId) {
  return containerId.replaceAll(/[^\w.-]+/g, "-").replaceAll(/^-+|-+$/g, "").toLowerCase().slice(0, 63);
}
async function uploadFileToGcs(file, data, mimeType) {
  if (typeof data === "string" || data instanceof Buffer) {
    await file.save(data, { contentType: mimeType });
    return;
  }
  if (data instanceof Blob) {
    const buffer = Buffer.from(await data.arrayBuffer());
    await file.save(buffer, { contentType: mimeType });
    return;
  }
  const readable = data instanceof ReadableStream ? Readable.fromWeb(data) : data;
  if (readable instanceof Readable) {
    await new Promise((resolve, reject) => {
      const writeStream = file.createWriteStream({ contentType: mimeType });
      readable.pipe(writeStream).on("finish", resolve).on("error", reject);
    });
    return;
  }
  throw new Error(`Unknown file type`);
}

//#endregion
export { GcpGcsStorageService };
//# sourceMappingURL=gcp-storage.mjs.map
```
package/dist/gcp-storage.mjs.map
ADDED

@@ -0,0 +1 @@
+{"version":3,"file":"gcp-storage.mjs","sources":["../src/adapters/gcp-storage.ts"],…}
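Every container id is funneled through `genBucketNameFromContainerId` to satisfy GCS bucket-name rules (per the source comment: lowercase, numbers, dashes, dots, 3-63 chars), so e.g. `"My Project!"` normalizes to `"my-project"`. A usage sketch (import specifier again assumed from the dist filename):

```js
import { Storage } from "@google-cloud/storage";
import { GcpGcsStorageService } from "storybooker/gcp-storage"; // assumed specifier

const storage = new GcpGcsStorageService(new Storage());
await storage.createContainer("storybooker-demo", {});
// Files accept string | Buffer | Blob | ReadableStream content, as handled
// by uploadFileToGcs above.
await storage.uploadFiles("storybooker-demo", [
  { content: "<h1>hi</h1>", mimeType: "text/html", path: "index.html" },
], {});
const file = await storage.downloadFile("storybooker-demo", "index.html", {});
```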
package/dist/handlers/handle-process-zip.mjs
ADDED

@@ -0,0 +1,90 @@
```js
import { generateStorageContainerId } from "../utils/adapter-utils.mjs";
import { writeStreamToFile } from "../utils/file-utils.mjs";
import { getMimeType } from "../~internal/mimes.mjs";
import { getStore } from "../utils/store.mjs";
import { BuildsModel } from "../models/builds-model.mjs";
import decompress from "decompress";
import { Buffer } from "node:buffer";
import fs from "node:fs";
import fsp from "node:fs/promises";
import os from "node:os";
import path from "node:path";

//#region src/handlers/handle-process-zip.ts
async function handleProcessZip(projectId, buildId, variant) {
  const { abortSignal, logger, storage } = getStore();
  const debugLog = (...args) => {
    logger.log(`(${projectId}-${buildId}-${variant})`, ...args);
  };
  debugLog("Creating temp dir");
  const localDirpath = fs.mkdtempSync(path.join(os.tmpdir(), `storybooker-${projectId}-${buildId}-`));
  const localZipFilePath = path.join(localDirpath, `${variant}.zip`);
  const outputDirpath = path.join(localDirpath, variant);
  const containerId = generateStorageContainerId(projectId);
  const buildIdModel = new BuildsModel(projectId).id(buildId);
  try {
    await buildIdModel.update({ [variant]: "processing" });
    debugLog("Downloading zip file");
    const file = await storage.downloadFile(containerId, `${buildId}/${variant}.zip`, {
      abortSignal,
      logger
    });
    if (!file.content) throw new Error("No file content found.");
    if (typeof file.content === "string") await fsp.writeFile(localZipFilePath, file.content);
    else if (file.content instanceof Blob) {
      const arrayBuffer = await file.content.arrayBuffer();
      await fsp.writeFile(localZipFilePath, Buffer.from(arrayBuffer));
    } else await writeStreamToFile(localZipFilePath, file.content);
    debugLog("Decompress zip file");
    await decompress(localZipFilePath, outputDirpath);
    debugLog("Upload uncompressed dir");
    await storage.uploadFiles(containerId, await dirpathToFiles(outputDirpath, `${buildId}/${variant}`), {
      abortSignal,
      logger
    });
    await buildIdModel.update({ [variant]: "ready" });
  } finally {
    debugLog("Cleaning up temp dir");
    await fsp.rm(localDirpath, {
      force: true,
      recursive: true
    }).catch(logger.error);
  }
}
async function dirpathToFiles(dirpath, prefix) {
  return (await fsp.readdir(dirpath, {
    encoding: "utf8",
    recursive: true,
    withFileTypes: true
  })).filter((file) => file.isFile() && !file.name.startsWith(".")).map((file) => path.join(file.parentPath, file.name)).map((filepath) => {
    const relativePath = filepath.replace(`${dirpath}/`, "");
    return {
      content: createWebReadableStream(filepath),
      mimeType: getMimeType(filepath),
      path: path.posix.join(prefix, relativePath)
    };
  });
}
function createWebReadableStream(filepath) {
  const readStream = fs.createReadStream(filepath);
  return new ReadableStream({
    cancel() {
      readStream.destroy();
    },
    start(controller) {
      readStream.on("data", (chunk) => {
        controller.enqueue(chunk);
      });
      readStream.on("end", () => {
        controller.close();
      });
      readStream.on("error", (error) => {
        controller.error(error);
      });
    }
  });
}

//#endregion
export { handleProcessZip };
//# sourceMappingURL=handle-process-zip.mjs.map
```
package/dist/handlers/handle-process-zip.mjs.map
ADDED

@@ -0,0 +1 @@
+{"version":3,"file":"handle-process-zip.mjs","sources":["../../src/handlers/handle-process-zip.ts"],…}
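`createWebReadableStream` above hand-rolls the Node-stream-to-web-stream bridge. A sketch of an equivalent using the built-in `Readable.toWeb` (Node 17+, and already what the GCS adapter's `downloadFile` uses; the hand-rolled version presumably avoids relying on that helper being available):

```js
import fs from "node:fs";
import { Readable } from "node:stream";

// Equivalent bridge via the built-in helper (path is a hypothetical example).
const webStream = Readable.toWeb(fs.createReadStream("/tmp/example.zip"));
```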
package/dist/handlers/handle-purge.d.mts
ADDED

@@ -0,0 +1,12 @@
```ts
import { LoggerAdapter } from "../~internal/adapter/logger.mjs";

//#region src/handlers/handle-purge.d.ts
type HandlePurge = (params: {
  projectId?: string;
}, options: {
  abortSignal?: AbortSignal;
  logger?: LoggerAdapter;
}) => Promise<void>;
//#endregion
export { HandlePurge };
//# sourceMappingURL=handle-purge.d.mts.map
```
package/dist/handlers/handle-purge.mjs
ADDED

@@ -0,0 +1,36 @@
```js
import { DEFAULT_PURGE_AFTER_DAYS, ONE_DAY_IN_MS } from "../~internal/constants.mjs";
import { getStore } from "../utils/store.mjs";
import { TagsModel } from "../models/tags-model.mjs";
import { ProjectsModel } from "../models/projects-model.mjs";
import { BuildsModel } from "../models/builds-model.mjs";

//#region src/handlers/handle-purge.ts
const handlePurge = async ({ projectId }) => {
  const projectModel = new ProjectsModel();
  if (projectId) await purgeProject(await projectModel.get(projectId));
  else {
    const promises = (await projectModel.list()).map((project) => purgeProject(project));
    await Promise.allSettled(promises);
  }
};
async function purgeProject(project) {
  const { locale, logger } = getStore();
  const { id: projectId, gitHubDefaultBranch, latestBuildId, purgeBuildsAfterDays = DEFAULT_PURGE_AFTER_DAYS } = project;
  const expiryTime = new Date(Date.now() - purgeBuildsAfterDays * ONE_DAY_IN_MS);
  logger.log(`[Project: ${projectId}] Purge builds which were last modified more than ${purgeBuildsAfterDays} days ago - before ${new Date(expiryTime).toLocaleString(locale)}`);
  const buildsModel = new BuildsModel(projectId);
  const expiredBuilds = await buildsModel.list({ filter: (item) => item.id !== latestBuildId && new Date(item.updatedAt) < expiryTime });
  for (const build of expiredBuilds) await buildsModel.delete(build.id, true);
  logger.log(`[Project: ${projectId}] Purged ${expiredBuilds.length} expired builds.`);
  const tagsModel = new TagsModel(projectId);
  const emptyTags = await tagsModel.list({ filter: (item) => {
    if (item.type === "branch" && item.value === gitHubDefaultBranch) return false;
    return item.buildsCount === 0;
  } });
  for (const tag of emptyTags) await tagsModel.delete(tag.id);
  logger.log(`[Project: ${projectId}] Purged ${emptyTags.length} empty tags...`);
}

//#endregion
export { handlePurge };
//# sourceMappingURL=handle-purge.mjs.map
```
package/dist/handlers/handle-purge.mjs.map
ADDED

@@ -0,0 +1 @@
+{"version":3,"file":"handle-purge.mjs","sources":["../../src/handlers/handle-purge.ts"],…}
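The retention rule above: a build expires once its `updatedAt` is more than `purgeBuildsAfterDays` × `ONE_DAY_IN_MS` in the past, the project's `latestBuildId` is always spared, and tags survive only while they still have builds or match the default branch. A hypothetical invocation via `createPurgeHandler` (exported from the package root, see index.d.mts below):

```js
// Sketch only — the PurgeHandlerOptions shape is not shown in this diff;
// `database` and `storage` stand for adapter instances like those above.
const purge = createPurgeHandler({ database, storage });
await purge({ projectId: "my-project" }, {}); // purge a single project
await purge({}, {}); // purge every project (failures collected via allSettled)
```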
package/dist/handlers/handle-serve-storybook.mjs
ADDED

@@ -0,0 +1,94 @@
```js
import { CACHE_CONTROL_PUBLIC_YEAR, SERVICE_NAME } from "../~internal/constants.mjs";
import { generateStorageContainerId } from "../utils/adapter-utils.mjs";
import { getMimeType } from "../~internal/mimes.mjs";
import { getStore } from "../utils/store.mjs";
import { urlBuilder } from "../urls.mjs";
import { authenticateOrThrow } from "../utils/auth.mjs";
import { SuperHeaders } from "@remix-run/headers";
import { HTTPException } from "hono/http-exception";
import path from "node:path";

//#region src/handlers/handle-serve-storybook.ts
async function handleServeStoryBook({ buildId, filepath, projectId }) {
  const { abortSignal, logger, storage } = getStore();
  const storageFilepath = path.posix.join(buildId, filepath);
  authenticateOrThrow({
    action: "read",
    projectId,
    resource: "build"
  });
  try {
    const { content, mimeType } = await storage.downloadFile(generateStorageContainerId(projectId), storageFilepath, {
      abortSignal,
      logger
    });
    if (!content) throw new HTTPException(404, { message: "File does not contain any content" });
    const headers = new SuperHeaders();
    headers.contentType = mimeType ?? getMimeType(filepath);
    headers.cacheControl = CACHE_CONTROL_PUBLIC_YEAR;
    if (filepath.endsWith("index.html")) {
      const bodyWithBackButton = (typeof content === "string" ? content : await new Response(content).text()).replace(`</body>`, `
      <div><a id="view-all" href="${urlBuilder.buildsList(projectId)}"
      style="position: fixed; bottom: 0.5rem; left: 0.5rem; z-index: 9999; padding: 0.25rem 0.5rem; background-color: black; color: white; border-radius: 0.25rem; text-decoration: none; font-size: 1rem; font-face: sans-serif; font-weight: 400;">
      ← ${SERVICE_NAME}
      </a></div>

    ${relativeHrefScripts}
</body>`);
      return new Response(bodyWithBackButton, {
        headers,
        status: 200
      });
    }
    if (filepath.endsWith("iframe.html")) {
      const bodyWithBackButton = (typeof content === "string" ? content : await new Response(content).text()).replace(`</body>`, `
${relativeHrefScripts}
</body>`);
      return new Response(bodyWithBackButton, {
        headers,
        status: 200
      });
    }
    if (content instanceof ReadableStream) {
      const body = await new Response(content).arrayBuffer();
      return new Response(body, {
        headers,
        status: 200
      });
    }
    return new Response(content, {
      headers,
      status: 200
    });
  } catch (error) {
    throw new HTTPException(404, {
      message: "File not found",
      cause: error
    });
  }
}
const relativeHrefScripts = `
<script defer>
// script to replace absolute links with relative links
window.addEventListener('load', () => {
  const linkElements = document.querySelectorAll("[href^='/']");
  linkElements.forEach((el) => {
    const href = typeof el.href === "string" ? el.href : el.href.baseVal;
    const newHref = "." + href.replace(origin, "");
    el.setAttribute("href", newHref);
  });
  const mediaElements = document.querySelectorAll("[src^='/']");
  mediaElements.forEach((el) => {
    const newSrc = el.src.replace(origin, ".");
    el.setAttribute("src", newSrc);
    if (el.hasAttribute("srcset")) {
      const newSrcset = el.srcset.replaceAll(origin, ".");
      el.setAttribute("srcset", newSrcset);
    }
  });
}, { once: true });
<\/script>`;

//#endregion
export { handleServeStoryBook };
//# sourceMappingURL=handle-serve-storybook.mjs.map
```
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"handle-serve-storybook.mjs","names":[],"sources":["../../src/handlers/handle-serve-storybook.ts"],"sourcesContent":["import { SuperHeaders } from \"@remix-run/headers\";\nimport { HTTPException } from \"hono/http-exception\";\nimport path from \"node:path\";\nimport { urlBuilder } from \"../urls.ts\";\nimport { generateStorageContainerId } from \"../utils/adapter-utils.ts\";\nimport { authenticateOrThrow } from \"../utils/auth.ts\";\nimport { CACHE_CONTROL_PUBLIC_YEAR, SERVICE_NAME } from \"../utils/constants.ts\";\nimport { getMimeType } from \"../utils/mime-utils.ts\";\nimport { getStore } from \"../utils/store.ts\";\n\nexport async function handleServeStoryBook({\n buildId,\n filepath,\n projectId,\n}: {\n buildId: string;\n projectId: string;\n filepath: string;\n}): Promise<Response> {\n const { abortSignal, logger, storage } = getStore();\n const storageFilepath = path.posix.join(buildId, filepath);\n authenticateOrThrow({ action: \"read\", projectId, resource: \"build\" });\n\n try {\n const { content, mimeType } = await storage.downloadFile(\n generateStorageContainerId(projectId),\n storageFilepath,\n { abortSignal, logger },\n );\n\n if (!content) {\n throw new HTTPException(404, { message: \"File does not contain any content\" });\n }\n\n const headers = new SuperHeaders();\n headers.contentType = mimeType ?? getMimeType(filepath);\n headers.cacheControl = CACHE_CONTROL_PUBLIC_YEAR;\n\n if (filepath.endsWith(\"index.html\")) {\n // Appending custom UI to index.html\n const data = typeof content === \"string\" ? content : await new Response(content).text();\n const bodyWithBackButton = data.replace(\n `</body>`,\n `\n <div><a id=\"view-all\" href=\"${urlBuilder.buildsList(projectId)}\"\n style=\"position: fixed; bottom: 0.5rem; left: 0.5rem; z-index: 9999; padding: 0.25rem 0.5rem; background-color: black; color: white; border-radius: 0.25rem; text-decoration: none; font-size: 1rem; font-face: sans-serif; font-weight: 400;\">\n ← ${SERVICE_NAME}\n </a></div>\n \n ${relativeHrefScripts}\n</body>`,\n );\n\n return new Response(bodyWithBackButton, { headers, status: 200 });\n }\n\n if (filepath.endsWith(\"iframe.html\")) {\n // Appending custom UI to index.html\n const data = typeof content === \"string\" ? content : await new Response(content).text();\n const bodyWithBackButton = data.replace(\n `</body>`,\n `\n${relativeHrefScripts}\n</body>`,\n );\n\n return new Response(bodyWithBackButton, { headers, status: 200 });\n }\n\n if (content instanceof ReadableStream) {\n const body = await new Response(content).arrayBuffer();\n return new Response(body, { headers, status: 200 });\n }\n\n return new Response(content, { headers, status: 200 });\n } catch (error) {\n throw new HTTPException(404, { message: \"File not found\", cause: error });\n }\n}\n\nconst relativeHrefScripts = `\n<script defer>\n// script to replace absolute links with relative links \nwindow.addEventListener('load', () => {\n const linkElements = document.querySelectorAll(\"[href^='/']\");\n linkElements.forEach((el) => {\n const href = typeof el.href === \"string\" ? 
el.href : el.href.baseVal;\n const newHref = \".\" + href.replace(origin, \"\");\n el.setAttribute(\"href\", newHref);\n });\n const mediaElements = document.querySelectorAll(\"[src^='/']\");\n mediaElements.forEach((el) => { \n const newSrc = el.src.replace(origin, \".\");\n el.setAttribute(\"src\", newSrc);\n if (el.hasAttribute(\"srcset\")) {\n const newSrcset = el.srcset.replaceAll(origin, \".\");\n el.setAttribute(\"srcset\", newSrcset);\n }\n });\n}, { once: true });\n</script>`;\n"],"mappings":";;;;;;;;;;;AAUA,eAAsB,qBAAqB,EACzC,SACA,UACA,aAKoB;CACpB,MAAM,EAAE,aAAa,QAAQ,YAAY,UAAU;CACnD,MAAM,kBAAkB,KAAK,MAAM,KAAK,SAAS,SAAS;AAC1D,qBAAoB;EAAE,QAAQ;EAAQ;EAAW,UAAU;EAAS,CAAC;AAErE,KAAI;EACF,MAAM,EAAE,SAAS,aAAa,MAAM,QAAQ,aAC1C,2BAA2B,UAAU,EACrC,iBACA;GAAE;GAAa;GAAQ,CACxB;AAED,MAAI,CAAC,QACH,OAAM,IAAI,cAAc,KAAK,EAAE,SAAS,qCAAqC,CAAC;EAGhF,MAAM,UAAU,IAAI,cAAc;AAClC,UAAQ,cAAc,YAAY,YAAY,SAAS;AACvD,UAAQ,eAAe;AAEvB,MAAI,SAAS,SAAS,aAAa,EAAE;GAGnC,MAAM,sBADO,OAAO,YAAY,WAAW,UAAU,MAAM,IAAI,SAAS,QAAQ,CAAC,MAAM,EACvD,QAC9B,WACA;gCACwB,WAAW,WAAW,UAAU,CAAC;;MAE3D,aAAa;;;IAGf,oBAAoB;SAEjB;AAED,UAAO,IAAI,SAAS,oBAAoB;IAAE;IAAS,QAAQ;IAAK,CAAC;;AAGnE,MAAI,SAAS,SAAS,cAAc,EAAE;GAGpC,MAAM,sBADO,OAAO,YAAY,WAAW,UAAU,MAAM,IAAI,SAAS,QAAQ,CAAC,MAAM,EACvD,QAC9B,WACA;EACN,oBAAoB;SAEf;AAED,UAAO,IAAI,SAAS,oBAAoB;IAAE;IAAS,QAAQ;IAAK,CAAC;;AAGnE,MAAI,mBAAmB,gBAAgB;GACrC,MAAM,OAAO,MAAM,IAAI,SAAS,QAAQ,CAAC,aAAa;AACtD,UAAO,IAAI,SAAS,MAAM;IAAE;IAAS,QAAQ;IAAK,CAAC;;AAGrD,SAAO,IAAI,SAAS,SAAS;GAAE;GAAS,QAAQ;GAAK,CAAC;UAC/C,OAAO;AACd,QAAM,IAAI,cAAc,KAAK;GAAE,SAAS;GAAkB,OAAO;GAAO,CAAC;;;AAI7E,MAAM,sBAAsB"}
package/dist/index.d.mts
ADDED
@@ -0,0 +1,28 @@
+import { StoryBookerUser } from "./~internal/adapter/auth.mjs";
+import { PurgeHandlerOptions, RouterOptions } from "./~internal/types.mjs";
+import "./~internal/adapter.mjs";
+import { HandlePurge } from "./handlers/handle-purge.mjs";
+import { appRouter, openapiConfig } from "./~internal/router.mjs";
+import { Hono } from "hono";
+import { TimingVariables } from "hono/timing";
+
+//#region src/index.d.ts
+
+/**
+ * Callback to create a Hono App based on provided options.
+ * @param options Options for creating a request handler.
+ * @returns The Hono App which can be used wherever Hono is supported.
+ */
+declare function createHonoRouter<User extends StoryBookerUser>(options: RouterOptions<User>): Hono<{
+  Variables: TimingVariables;
+}>;
+/**
+ * Callback to create a purge-handler based on provided options.
+ * Purging deletes all builds older than certain days based on Project's configuration.
+ *
+ * Note: The latest build on project's default branch is not deleted.
+ */
+declare function createPurgeHandler(options: PurgeHandlerOptions): HandlePurge;
+//#endregion
+export { appRouter, createHonoRouter, createPurgeHandler, openapiConfig };
+//# sourceMappingURL=index.d.mts.map
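The two declarations above are the package's public factories. A minimal, hypothetical usage sketch, assuming the root "storybooker" export; the adapter instances are placeholders, and only the database/storage option keys are taken from the shipped source:

// Hypothetical usage sketch; the adapter values below are placeholders, not documented API.
import { createHonoRouter } from "storybooker";

declare const database: any; // e.g. a DynamoDB/Cosmos DB/MySQL adapter instance (assumed)
declare const storage: any; // e.g. an S3/Blob Storage/GCS adapter instance (assumed)

const app = createHonoRouter({ database, storage });
export default app; // a Hono app, mountable wherever Hono is supported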
package/dist/index.mjs
ADDED
@@ -0,0 +1,62 @@
+import { createConsoleLoggerAdapter } from "./~internal/adapter/logger.mjs";
+import { DEFAULT_LOCALE } from "./~internal/constants.mjs";
+import { localStore, setupStore } from "./utils/store.mjs";
+import { onUnhandledErrorHandler, parseErrorMessage, prettifyZodValidationErrorMiddleware } from "./utils/error.mjs";
+import { handlePurge } from "./handlers/handle-purge.mjs";
+import { appRouter, openapiConfig } from "./~internal/router.mjs";
+import { htmxRedirectResponse } from "./utils/response.mjs";
+import { SuperHeaders } from "@remix-run/headers";
+import { Hono } from "hono";
+import { logger } from "hono/logger";
+
+//#region src/index.ts
+if ("setEncoding" in process.stdout) process.stdout.setEncoding("utf8");
+/**
+ * Callback to create a Hono App based on provided options.
+ * @param options Options for creating a request handler.
+ * @returns The Hono App which can be used wherever Hono is supported.
+ */
+function createHonoRouter(options) {
+	const logger$1 = options.logger ?? createConsoleLoggerAdapter();
+	const middlewares = options.config?.middlewares ?? [];
+	const initPromises = Promise.allSettled([
+		options.auth?.init?.({ logger: logger$1 }).catch(logger$1.error),
+		options.database.init?.({ logger: logger$1 }).catch(logger$1.error),
+		options.storage.init?.({ logger: logger$1 }).catch(logger$1.error)
+	]);
+	return new Hono({ strict: false }).use(logger(logger$1.log), prettifyZodValidationErrorMiddleware(logger$1), ...middlewares, setupStore(options, initPromises), htmxRedirectResponse()).route("/", appRouter).onError(onUnhandledErrorHandler(options));
+}
+/**
+ * Callback to create a purge-handler based on provided options.
+ * Purging deletes all builds older than certain days based on Project's configuration.
+ *
+ * Note: The latest build on project's default branch is not deleted.
+ */
+function createPurgeHandler(options) {
+	const logger$1 = options.logger ?? createConsoleLoggerAdapter();
+	return async (...params) => {
+		const dummyRequest = new Request("http://0.0.0.0/");
+		localStore.enterWith({
+			abortSignal: params[1].abortSignal,
+			database: options.database,
+			errorParser: options.errorParser,
+			headers: new SuperHeaders(),
+			locale: DEFAULT_LOCALE,
+			logger: params[1]?.logger ?? logger$1,
+			prefix: "/",
+			request: dummyRequest,
+			storage: options.storage,
+			url: dummyRequest.url,
+			user: null
+		});
+		try {
+			await handlePurge(...params);
+		} catch (error) {
+			logger$1.error("PurgeError", parseErrorMessage(error, options.errorParser).errorMessage);
+		}
+	};
+}
+
+//#endregion
+export { appRouter, createHonoRouter, createPurgeHandler, openapiConfig };
+//# sourceMappingURL=index.mjs.map
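createPurgeHandler above wraps handlePurge in what appears to be an AsyncLocalStorage context (localStore.enterWith) before delegating. A hypothetical wiring sketch; the shipped code only shows that the handler's second parameter carries { abortSignal, logger? }, so the argument list is left to the HandlePurge type rather than invented here:

// Hypothetical sketch; argument shapes beyond params[1] are not visible in this diff.
import { createPurgeHandler } from "storybooker";

declare const database: any; // same adapter instances the router would use (assumed)
declare const storage: any;

const purge = createPurgeHandler({ database, storage });

declare const purgeArgs: Parameters<typeof purge>; // supplied by the host, e.g. a cron/timer trigger
await purge(...purgeArgs); // failures are caught internally and logged as "PurgeError"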
package/dist/index.mjs.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"index.mjs","names":["logger","loggerMiddleware"],"sources":["../src/index.ts"],"sourcesContent":["import { SuperHeaders } from \"@remix-run/headers\";\nimport { Hono } from \"hono\";\nimport { logger as loggerMiddleware } from \"hono/logger\";\nimport type { TimingVariables } from \"hono/timing\";\nimport { createConsoleLoggerAdapter, type StoryBookerUser } from \"./adapters/_internal/index.ts\";\nimport { handlePurge, type HandlePurge } from \"./handlers/handle-purge.ts\";\nimport { appRouter } from \"./routers/_app-router.ts\";\nimport type { PurgeHandlerOptions, RouterOptions } from \"./types.ts\";\nimport { DEFAULT_LOCALE } from \"./utils/constants.ts\";\nimport {\n onUnhandledErrorHandler,\n parseErrorMessage,\n prettifyZodValidationErrorMiddleware,\n} from \"./utils/error.ts\";\nimport { htmxRedirectResponse } from \"./utils/response.ts\";\nimport { localStore, setupStore } from \"./utils/store.ts\";\n\nif (\"setEncoding\" in process.stdout) {\n process.stdout.setEncoding(\"utf8\");\n}\nexport { appRouter, openapiConfig } from \"./routers/_app-router.ts\";\n\n/**\n * Callback to create a Hono App based on provided options.\n * @param options Options for creating a request handler.\n * @returns The Hono App which can be used wherever Hono is supported.\n */\nexport function createHonoRouter<User extends StoryBookerUser>(\n options: RouterOptions<User>,\n): Hono<{ Variables: TimingVariables }> {\n const logger = options.logger ?? createConsoleLoggerAdapter();\n const middlewares = options.config?.middlewares ?? [];\n const initPromises = Promise.allSettled([\n options.auth?.init?.({ logger }).catch(logger.error),\n options.database.init?.({ logger }).catch(logger.error),\n options.storage.init?.({ logger }).catch(logger.error),\n ]);\n\n return new Hono<{ Variables: TimingVariables }>({ strict: false })\n .use(\n loggerMiddleware(logger.log),\n prettifyZodValidationErrorMiddleware(logger),\n ...middlewares,\n setupStore<User>(options, initPromises),\n htmxRedirectResponse(),\n )\n .route(\"/\", appRouter)\n .onError(onUnhandledErrorHandler<User>(options));\n}\n\n/**\n * Callback to create a purge-handler based on provided options.\n * Purging deletes all builds older than certain days based on Project's configuration.\n *\n * Note: The latest build on project's default branch is not deleted.\n */\nexport function createPurgeHandler(options: PurgeHandlerOptions): HandlePurge {\n const logger = options.logger ?? createConsoleLoggerAdapter();\n\n return async (...params: Parameters<HandlePurge>): Promise<void> => {\n const dummyRequest = new Request(\"http://0.0.0.0/\");\n localStore.enterWith({\n abortSignal: params[1].abortSignal,\n database: options.database,\n errorParser: options.errorParser,\n headers: new SuperHeaders(),\n locale: DEFAULT_LOCALE,\n logger: params[1]?.logger ?? logger,\n prefix: \"/\",\n request: dummyRequest,\n storage: options.storage,\n url: dummyRequest.url,\n user: null,\n });\n\n try {\n await handlePurge(...params);\n } catch (error) {\n logger.error(\"PurgeError\", parseErrorMessage(error, options.errorParser).errorMessage);\n }\n };\n}\n"],"mappings":";;;;;;;;;;;;AAiBA,IAAI,iBAAiB,QAAQ,OAC3B,SAAQ,OAAO,YAAY,OAAO;;;;;;AASpC,SAAgB,iBACd,SACsC;CACtC,MAAMA,WAAS,QAAQ,UAAU,4BAA4B;CAC7D,MAAM,cAAc,QAAQ,QAAQ,eAAe,EAAE;CACrD,MAAM,eAAe,QAAQ,WAAW;EACtC,QAAQ,MAAM,OAAO,EAAE,kBAAQ,CAAC,CAAC,MAAMA,SAAO,MAAM;EACpD,QAAQ,SAAS,OAAO,EAAE,kBAAQ,CAAC,CAAC,MAAMA,SAAO,MAAM;EACvD,QAAQ,QAAQ,OAAO,EAAE,kBAAQ,CAAC,CAAC,MAAMA,SAAO,MAAM;EACvD,CAAC;AAEF,QAAO,IAAI,KAAqC,EAAE,QAAQ,OAAO,CAAC,CAC/D,IACCC,OAAiBD,SAAO,IAAI,EAC5B,qCAAqCA,SAAO,EAC5C,GAAG,aACH,WAAiB,SAAS,aAAa,EACvC,sBAAsB,CACvB,CACA,MAAM,KAAK,UAAU,CACrB,QAAQ,wBAA8B,QAAQ,CAAC;;;;;;;;AASpD,SAAgB,mBAAmB,SAA2C;CAC5E,MAAMA,WAAS,QAAQ,UAAU,4BAA4B;AAE7D,QAAO,OAAO,GAAG,WAAmD;EAClE,MAAM,eAAe,IAAI,QAAQ,kBAAkB;AACnD,aAAW,UAAU;GACnB,aAAa,OAAO,GAAG;GACvB,UAAU,QAAQ;GAClB,aAAa,QAAQ;GACrB,SAAS,IAAI,cAAc;GAC3B,QAAQ;GACR,QAAQ,OAAO,IAAI,UAAUA;GAC7B,QAAQ;GACR,SAAS;GACT,SAAS,QAAQ;GACjB,KAAK,aAAa;GAClB,MAAM;GACP,CAAC;AAEF,MAAI;AACF,SAAM,YAAY,GAAG,OAAO;WACrB,OAAO;AACd,YAAO,MAAM,cAAc,kBAAkB,OAAO,QAAQ,YAAY,CAAC,aAAa"}