@genoacms/adapter-gcp 0.5.2 → 0.7.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,8 @@
+ import { handler } from './build/handler.js'
+
+ function genoacms (req, res) {
+ handler(req, res, undefined)
+ }
+
+ export { genoacms }
+
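
The new snippet above wraps the SvelteKit Node handler so Cloud Functions can invoke it through the genoacms entry point. A minimal local smoke test might look like the following sketch, assuming a SvelteKit build has already produced ./build/handler.js (paths are illustrative, not part of the package):

    // Hedged sketch: serve the exported genoacms entry point locally with node:http.
    // The import path is hypothetical; it points at the snippet file shown above.
    import { createServer } from 'node:http'
    import { genoacms } from './deployment/snippets/index.js'

    // genoacms has the (req, res) shape Node's HTTP server expects.
    createServer(genoacms).listen(8080)
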
@@ -1,5 +1,8 @@
  import config from '../../config.js';
  import { ProjectsClient } from '@google-cloud/resource-manager';
+ const authorizationConfig = config.authorization.providers.find((provider) => provider.name === 'gcp');
+ if (!authorizationConfig)
+ throw new Error('authorization-provider-not-found');
  const resourceManager = new ProjectsClient({
  projectId: config.authorization.projectId,
  credentials: config.authorization.credentials
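
Several services now resolve their settings from a providers array instead of a flat config object. A hedged sketch of the relevant part of the GenoaCMS config (field values are placeholders; the exact schema is defined by @genoacms/cloudabstraction and is not shown in this diff):

    // Illustrative config shape only; project, bucket and credential values are hypothetical.
    const config = {
      authorization: {
        projectId: 'my-project',
        credentials: { /* service-account key */ },
        providers: [
          { name: 'gcp' } // matched by the lookup shown above
        ]
      },
      database: {
        providers: [
          { name: 'firestore', projectId: 'my-project', databaseId: '(default)', credentials: { /* ... */ } }
        ]
      },
      storage: {
        providers: [
          { name: 'gcs' } // matched by the storage adapter
        ]
      }
    }
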
@@ -1,7 +1,7 @@
  import type { Adapter } from '@genoacms/cloudabstraction/database';
- declare const createDocument: Adapter.createDocument;
- declare const getCollection: Adapter.getCollection;
- declare const getDocument: Adapter.getDocument;
- declare const updateDocument: Adapter.updateDocument;
- declare const deleteDocument: Adapter.deleteDocument;
+ declare const createDocument: Adapter['createDocument'];
+ declare const getCollection: Adapter['getCollection'];
+ declare const getDocument: Adapter['getDocument'];
+ declare const updateDocument: Adapter['updateDocument'];
+ declare const deleteDocument: Adapter['deleteDocument'];
  export { createDocument, getDocument, getCollection, updateDocument, deleteDocument };
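
The Adapter.x to Adapter['x'] change recurs throughout this release. A hedged illustration of the typing difference, assuming Adapter is an interface or type alias (its definition lives in @genoacms/cloudabstraction and is not part of this diff):

    // Sketch only: indexed access types work on interfaces/type aliases,
    // whereas dot notation would only be valid if Adapter were a namespace.
    import type { Adapter } from '@genoacms/cloudabstraction/database'

    declare const createDocument: Adapter['createDocument']   // indexed access type: valid
    // declare const broken: Adapter.createDocument            // invalid unless Adapter is a namespace
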
@@ -1,9 +1,12 @@
  import config from '../../config.js';
  import { Firestore } from '@google-cloud/firestore';
+ const firestoreConfig = config.database.providers.find((provider) => provider.name === 'firestore');
+ if (!firestoreConfig)
+ throw new Error('firestore-provider-not-found');
  const firestore = new Firestore({
- credentials: config.database.credentials,
- databaseId: config.database.databaseId,
- projectId: config.database.projectId
+ credentials: firestoreConfig.credentials,
+ databaseId: firestoreConfig.databaseId,
+ projectId: firestoreConfig.projectId
  });
  const createDocument = async (reference, data) => {
  const document = await firestore.collection(reference.name).add(data);
@@ -1 +1,2 @@
- export default function (): Promise<void>;
+ declare function deploy(): Promise<void>;
+ export default deploy;
@@ -1,67 +1,111 @@
  import config from '../../config.js';
- import { getBucket } from '../storage/storage.js';
- import { readdir, lstat } from 'node:fs/promises';
- import { createReadStream } from 'node:fs';
- import { join } from 'node:path';
- import { CloudFunctionsServiceClient } from '@google-cloud/functions';
- const functionsClient = new CloudFunctionsServiceClient({
+ import { createReadStream, createWriteStream } from 'node:fs';
+ import { resolve, dirname, basename } from 'node:path';
+ import { fileURLToPath } from 'node:url';
+ import { v2 } from '@google-cloud/functions';
+ import archiver from 'archiver';
+ const { FunctionServiceClient } = v2;
+ const functionsClient = new FunctionServiceClient({
  credentials: config.deployment.credentials
  });
  const projectId = config.deployment.projectId;
  const region = config.deployment.region;
- async function uploadDirectory(bucketName, directoryPath, prefix = '') {
- const bucket = getBucket(bucketName);
- const files = await readdir(directoryPath);
- for (const file of files) {
- const filePath = join(directoryPath, file);
- const destination = join(prefix, file);
- const isFileDirectory = (await lstat(filePath)).isDirectory();
- if (isFileDirectory) {
- await uploadDirectory(bucketName, filePath, destination);
+ const currentDir = dirname(fileURLToPath(import.meta.url));
+ async function createZip(source, injectPaths, ignorePaths, out) {
+ await new Promise((resolve, reject) => {
+ const output = createWriteStream(out);
+ const archive = archiver('zip', { zlib: { level: 9 } });
+ output.on('close', () => {
+ resolve();
+ });
+ archive.on('error', (err) => {
+ reject(err);
+ });
+ archive.pipe(output);
+ archive.glob(source, { ignore: ignorePaths });
+ for (const path of injectPaths) {
+ archive.file(path, { name: basename(path) });
  }
- else {
- const fileStream = createReadStream(filePath);
- const gcsFile = bucket.file(destination);
- await new Promise((resolve, reject) => {
- fileStream
- .pipe(gcsFile.createWriteStream())
- .on('error', reject)
- .on('finish', resolve);
- });
- }
- }
+ archive.finalize();
+ });
  }
- async function uploadSourceCode(bucketName, source, dest) {
- const bucket = getBucket(bucketName);
- const uploadResponse = await bucket.upload(source, {
- gzip: true,
- destination: dest
+ async function uploadSource(sourceArchivePath) {
+ const location = functionsClient.locationPath(projectId, region);
+ const [urlResponse] = await functionsClient.generateUploadUrl({ parent: location });
+ const uploadUrl = urlResponse.uploadUrl;
+ const storageSource = urlResponse.storageSource;
+ if (!uploadUrl || !storageSource)
+ throw new Error('Upload URL not found');
+ const sourceArchiveStream = createReadStream(sourceArchivePath);
+ await fetch(uploadUrl, {
+ method: 'PUT',
+ // @ts-expect-error: invalid typings
+ body: sourceArchiveStream,
+ duplex: 'half',
+ headers: {
+ 'Content-Type': 'application/zip'
+ }
  });
- const file = uploadResponse[0];
- return file.cloudStorageURI.toString();
+ return storageSource;
  }
- async function deployFunction(name, source) {
+ async function deployFunction(functionName, storageSource) {
  const location = functionsClient.locationPath(projectId, region);
- const [response] = await functionsClient.createFunction({
- location,
+ const name = functionsClient.functionPath(projectId, region, functionName);
+ let isFunctionExisting;
+ try {
+ await functionsClient.getFunction({ name });
+ isFunctionExisting = true;
+ }
+ catch (error) {
+ isFunctionExisting = false;
+ }
+ const operationParams = {
+ functionId: functionName,
+ parent: location,
  function: {
  name,
- sourceUploadUrl: source,
- entryPoint: 'handler',
- runtime: 'nodejs20',
- httpsTrigger: {},
- environmentVariables: {
- NODE_ENV: 'production'
+ buildConfig: {
+ entryPoint: 'genoacms',
+ runtime: 'nodejs20',
+ source: {
+ storageSource
+ }
+ },
+ serviceConfig: {
+ minInstanceCount: 0,
+ maxInstanceCount: 1,
+ ingressSettings: 1,
+ environmentVariables: {
+ NODE_ENV: 'production'
+ }
  }
  }
- }, {});
+ };
+ let response;
+ if (isFunctionExisting) {
+ [response] = await functionsClient.updateFunction(operationParams);
+ }
+ else {
+ [response] = await functionsClient.createFunction(operationParams);
+ }
  console.log(response);
  }
- export default async function () {
- const bucketName = config.storage.defaultBucket;
- const assetsPath = '.genoacms/deployment/static';
- const buildArchivePath = '.genoacms/deployment/build.zip';
- await uploadDirectory(bucketName, './static', assetsPath);
- const buildArchiveURI = await uploadSourceCode(bucketName, './build', buildArchivePath);
- await deployFunction('genoacms', buildArchiveURI);
+ async function deploy() {
+ const buildDirectoryPath = '**';
+ const buildArchivePath = resolve(currentDir, '../../../deployment/build.zip');
+ const functionEntryScriptPath = resolve(currentDir, '../../../deployment/snippets/index.js');
+ const ignoreArchivePaths = [
+ 'node_modules/**',
+ '.git/**',
+ '.github/**',
+ '.gitignore',
+ 'build/**'
+ ];
+ const injectArchivePaths = [
+ functionEntryScriptPath
+ ];
+ await createZip(buildDirectoryPath, injectArchivePaths, ignoreArchivePaths, buildArchivePath);
+ const functionStorageSource = await uploadSource(buildArchivePath);
+ await deployFunction('genoacms', functionStorageSource);
  }
+ export default deploy;
@@ -1,4 +1,4 @@
- const svelteKitAdapter = '@sveltejs/adapter-node';
+ const svelteKitAdapter = '@genoacms/sveltekit-adapter-cloud-run-functions';
  const deployProcedure = async () => {
  const deploy = (await import('./deploy.js')).default;
  await deploy();
@@ -1,9 +1,9 @@
  import type { Adapter } from '@genoacms/cloudabstraction/storage';
- declare const getObject: Adapter.getObject;
- declare const getPublicURL: Adapter.getPublicURL;
- declare const getSignedURL: Adapter.getSignedURL;
- declare const uploadObject: Adapter.uploadObject;
- declare const deleteObject: Adapter.deleteObject;
- declare const listDirectory: Adapter.listDirectory;
- declare const createDirectory: Adapter.createDirectory;
+ declare const getObject: Adapter['getObject'];
+ declare const getPublicURL: Adapter['getPublicURL'];
+ declare const getSignedURL: Adapter['getSignedURL'];
+ declare const uploadObject: Adapter['uploadObject'];
+ declare const deleteObject: Adapter['deleteObject'];
+ declare const listDirectory: Adapter['listDirectory'];
+ declare const createDirectory: Adapter['createDirectory'];
  export { getObject, getPublicURL, getSignedURL, uploadObject, deleteObject, listDirectory, createDirectory };
@@ -1,4 +1,5 @@
  import { getBucket } from './storage.js';
+ import { join } from 'path';
  const getObject = async ({ bucket, name }) => {
  const bucketInstance = getBucket(bucket);
  const file = bucketInstance.file(name);
@@ -11,11 +12,9 @@ const getPublicURL = async ({ bucket, name }) => {
  const file = bucketInstance.file(name);
  return file.publicUrl();
  };
- const getSignedURL = async ({ bucket, name }) => {
+ const getSignedURL = async ({ bucket, name }, expires) => {
  const bucketInstance = getBucket(bucket);
  const file = bucketInstance.file(name);
- const expires = new Date();
- expires.setTime(expires.getTime() + 60 * 60 * 1_000);
  const [url] = await file.getSignedUrl({
  action: 'read',
  expires
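
getSignedURL no longer hard-codes a one-hour expiry; the caller now supplies it as a second argument. A hedged usage sketch (bucket and object names are placeholders):

    // Sketch only: the second argument is the Date at which the signed URL should expire.
    const url = await getSignedURL(
      { bucket: 'my-bucket', name: 'uploads/report.pdf' },
      new Date(Date.now() + 60 * 60 * 1000)   // e.g. one hour from now, matching the old default
    )
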
@@ -36,7 +35,7 @@ const listDirectory = async ({ bucket, name }, listingParams = {}) => {
  const bucketInstance = getBucket(bucket);
  const options = {
  autoPaginate: false,
- prefix: name,
+ prefix: join(name, '/'),
  maxResults: listingParams?.limit,
  startOffset: listingParams?.startAfter,
  delimiter: '/'
@@ -52,7 +51,7 @@ const listDirectory = async ({ bucket, name }, listingParams = {}) => {
  lastModified: new Date(file.metadata.updated)
  };
  }),
- directories: apiResponse?.prefixes ?? []
+ directories: (apiResponse?.prefixes ?? []).filter((item) => item !== name)
  };
  };
  const createDirectory = async ({ bucket, name }) => {
@@ -1,5 +1,8 @@
  import config from '../../config.js';
  import { Storage } from '@google-cloud/storage';
+ const storageConfig = config.storage.providers.find((provider) => provider.name === 'gcs');
+ if (!storageConfig)
+ throw new Error('storage-provider-not-found');
  const storage = new Storage({
  credentials: config.storage.credentials
  });
package/package.json CHANGED
@@ -1,6 +1,7 @@
  {
  "name": "@genoacms/adapter-gcp",
- "version": "0.5.2",
+ "version": "0.7.1",
+ "type": "module",
  "description": "Implementation of abstraction layer of GenoaCMS for GCP",
  "repository": {
  "type": "git",
@@ -15,15 +16,19 @@
  "url": "https://github.com/GenoaCMS/adapter-gcp/issues"
  },
  "homepage": "https://github.com/GenoaCMS/adapter-gcp#readme",
- "type": "module",
+ "publishConfig": {
+ "provenance": true
+ },
  "dependencies": {
- "@genoacms/cloudabstraction": "^0.5.2",
+ "@genoacms/cloudabstraction": "^0.7.1",
  "@google-cloud/firestore": "^7.1.0",
- "@google-cloud/functions": "^3.2.0",
+ "@google-cloud/functions": "^3.4.0",
  "@google-cloud/resource-manager": "^5.1.0",
- "@google-cloud/storage": "^7.7.0"
+ "@google-cloud/storage": "^7.7.0",
+ "archiver": "^7.0.0"
  },
  "devDependencies": {
+ "@types/archiver": "^6.0.2",
  "@typescript-eslint/eslint-plugin": "^6.9.0",
  "eslint": "^8.52.0",
  "eslint-config-standard-with-typescript": "^39.1.1",
@@ -35,11 +40,11 @@
  "vitest": "^1.0.4"
  },
  "peerDependencies": {
- "@sveltejs/adapter-node": "^4.0.1",
- "@google-cloud/functions-framework": "^3.3.0"
+ "@genoacms/sveltekit-adapter-cloud-run-functions": "^1.0.0"
  },
  "files": [
  "src",
+ "deployment",
  "dist"
  ],
  "exports": {
@@ -51,6 +56,10 @@
  "import": "./dist/services/database/index.js",
  "types": "./dist/services/database/index.d.ts"
  },
+ "./deployment": {
+ "import": "./dist/services/deployment/index.js",
+ "types": "./dist/services/deployment/index.d.ts"
+ },
  "./storage": {
  "import": "./dist/services/storage/index.js",
  "types": "./dist/services/storage/index.d.ts"
@@ -1,7 +1,9 @@
- import type { Adapter } from '@genoacms/cloudabstraction/authorization'
+ import type { Adapter, AuthorizationProvider } from '@genoacms/cloudabstraction/authorization'
  import config from '../../config.js'
  import { ProjectsClient } from '@google-cloud/resource-manager'
 
+ const authorizationConfig = config.authorization.providers.find((provider: AuthorizationProvider) => provider.name === 'gcp')
+ if (!authorizationConfig) throw new Error('authorization-provider-not-found')
  const resourceManager = new ProjectsClient({
  projectId: config.authorization.projectId,
  credentials: config.authorization.credentials
@@ -1,5 +1,6 @@
  import type {
  Adapter,
+ DatabaseProvider,
  CollectionSnapshot,
  Document,
  DocumentReference,
@@ -9,13 +10,15 @@ import type {
  import config from '../../config.js'
  import { Firestore } from '@google-cloud/firestore'
 
+ const firestoreConfig = config.database.providers.find((provider: DatabaseProvider) => provider.name === 'firestore')
+ if (!firestoreConfig) throw new Error('firestore-provider-not-found')
  const firestore = new Firestore({
- credentials: config.database.credentials,
- databaseId: config.database.databaseId,
- projectId: config.database.projectId
+ credentials: firestoreConfig.credentials,
+ databaseId: firestoreConfig.databaseId,
+ projectId: firestoreConfig.projectId
  })
 
- const createDocument: Adapter.createDocument = async (reference, data) => {
+ const createDocument: Adapter['createDocument'] = async (reference, data) => {
  const document = await firestore.collection(reference.name).add(data)
  const documentReference: DocumentReference<typeof reference> = {
  collection: reference,
@@ -27,7 +30,7 @@ const createDocument: Adapter.createDocument = async (reference, data) => {
  } satisfies DocumentSnapshot<typeof reference>
  }
 
- const getCollection: Adapter.getCollection = async (reference) => {
+ const getCollection: Adapter['getCollection'] = async (reference) => {
  const collection = await firestore.collection(reference.name).get()
  const documents: CollectionSnapshot<typeof reference> = []
 
@@ -48,7 +51,7 @@ const getCollection: Adapter.getCollection = async (reference) => {
  return documents
  }
 
- const getDocument: Adapter.getDocument = async ({ collection, id }) => {
+ const getDocument: Adapter['getDocument'] = async ({ collection, id }) => {
  const document = await firestore.collection(collection.name).doc(id).get()
  if (!document.exists) return undefined
  const documentReference: DocumentReference<typeof collection> = {
@@ -62,7 +65,7 @@ const getDocument: Adapter.getDocument = async ({ collection, id }) => {
  return documentSnapshot
  }
 
- const updateDocument: Adapter.updateDocument = async (reference, document) => {
+ const updateDocument: Adapter['updateDocument'] = async (reference, document) => {
  await firestore.collection(reference.collection.name).doc(reference.id).update(document)
  return {
  reference,
@@ -70,7 +73,7 @@ const updateDocument: Adapter.updateDocument = async (reference, document) => {
  } satisfies UpdateSnapshot<typeof reference.collection>
  }
 
- const deleteDocument: Adapter.deleteDocument = async (reference) => {
+ const deleteDocument: Adapter['deleteDocument'] = async (reference) => {
  await firestore.collection(reference.collection.name).doc(reference.id).delete()
  }
 
@@ -1,74 +1,120 @@
  import config from '../../config.js'
- import { getBucket } from '../storage/storage.js'
- import { readdir, lstat } from 'node:fs/promises'
- import { createReadStream } from 'node:fs'
- import { join } from 'node:path'
- import { CloudFunctionsServiceClient } from '@google-cloud/functions'
+ import { createReadStream, createWriteStream } from 'node:fs'
+ import { resolve, dirname, basename } from 'node:path'
+ import { fileURLToPath } from 'node:url'
+ import { v2 } from '@google-cloud/functions'
+ import archiver from 'archiver'
+ import type { google } from '@google-cloud/functions/build/protos/protos.js'
+ type IStorageSource = google.cloud.functions.v2.IStorageSource
 
- const functionsClient = new CloudFunctionsServiceClient({
+ const { FunctionServiceClient } = v2
+ const functionsClient = new FunctionServiceClient({
  credentials: config.deployment.credentials
  })
  const projectId = config.deployment.projectId
  const region = config.deployment.region
 
- async function uploadDirectory (bucketName: string, directoryPath: string, prefix = ''): Promise<void> {
- const bucket = getBucket(bucketName)
- const files = await readdir(directoryPath)
+ const currentDir = dirname(fileURLToPath(import.meta.url))
 
- for (const file of files) {
- const filePath = join(directoryPath, file)
- const destination = join(prefix, file)
+ async function createZip (source: string, injectPaths: string[], ignorePaths: string[], out: string): Promise<void> {
+ await new Promise<void>((resolve, reject) => {
+ const output = createWriteStream(out)
+ const archive = archiver('zip', { zlib: { level: 9 } })
 
- const isFileDirectory = (await lstat(filePath)).isDirectory()
- if (isFileDirectory) {
- await uploadDirectory(bucketName, filePath, destination)
- } else {
- const fileStream = createReadStream(filePath)
- const gcsFile = bucket.file(destination)
+ output.on('close', () => {
+ resolve()
+ })
 
- await new Promise((resolve, reject) => {
- fileStream
- .pipe(gcsFile.createWriteStream())
- .on('error', reject)
- .on('finish', resolve)
- })
+ archive.on('error', (err) => {
+ reject(err)
+ })
+
+ archive.pipe(output)
+ archive.glob(source, { ignore: ignorePaths })
+ for (const path of injectPaths) {
+ archive.file(path, { name: basename(path) })
  }
- }
+ archive.finalize()
+ })
  }
 
- async function uploadSourceCode (bucketName: string, source: string, dest: string): Promise<string> {
- const bucket = getBucket(bucketName)
- const uploadResponse = await bucket.upload(source, {
- gzip: true,
- destination: dest
+ async function uploadSource (sourceArchivePath: string): Promise<IStorageSource> {
+ const location = functionsClient.locationPath(projectId, region)
+ const [urlResponse] = await functionsClient.generateUploadUrl({ parent: location })
+ const uploadUrl = urlResponse.uploadUrl
+ const storageSource = urlResponse.storageSource
+ if (!uploadUrl || !storageSource) throw new Error('Upload URL not found')
+ const sourceArchiveStream = createReadStream(sourceArchivePath)
+ await fetch(uploadUrl, {
+ method: 'PUT',
+ // @ts-expect-error: invalid typings
+ body: sourceArchiveStream,
+ duplex: 'half',
+ headers: {
+ 'Content-Type': 'application/zip'
+ }
  })
- const file = uploadResponse[0]
- return file.cloudStorageURI.toString()
+ return storageSource
  }
 
- async function deployFunction (name: string, source: string): Promise<void> {
+ async function deployFunction (functionName: string, storageSource: IStorageSource): Promise<void> {
  const location = functionsClient.locationPath(projectId, region)
- const [response] = await functionsClient.createFunction({
- location,
+ const name = functionsClient.functionPath(projectId, region, functionName)
+ let isFunctionExisting: boolean
+ try {
+ await functionsClient.getFunction({ name })
+ isFunctionExisting = true
+ } catch (error) {
+ isFunctionExisting = false
+ }
+ const operationParams = {
+ functionId: functionName,
+ parent: location,
  function: {
  name,
- sourceUploadUrl: source,
- entryPoint: 'handler',
- runtime: 'nodejs20',
- httpsTrigger: {},
- environmentVariables: {
- NODE_ENV: 'production'
+ buildConfig: {
+ entryPoint: 'genoacms',
+ runtime: 'nodejs20',
+ source: {
+ storageSource
+ }
+ },
+ serviceConfig: {
+ minInstanceCount: 0,
+ maxInstanceCount: 1,
+ ingressSettings: 1, // ALLOW_ALL
+ environmentVariables: {
+ NODE_ENV: 'production'
+ }
  }
  }
- }, {})
+ }
+ let response
+ if (isFunctionExisting) {
+ [response] = await functionsClient.updateFunction(operationParams)
+ } else {
+ [response] = await functionsClient.createFunction(operationParams)
+ }
  console.log(response)
  }
 
- export default async function (): Promise<void> {
- const bucketName = config.storage.defaultBucket
- const assetsPath = '.genoacms/deployment/static'
- const buildArchivePath = '.genoacms/deployment/build.zip'
- await uploadDirectory(bucketName, './static', assetsPath)
- const buildArchiveURI = await uploadSourceCode(bucketName, './build', buildArchivePath)
- await deployFunction('genoacms', buildArchiveURI)
+ async function deploy (): Promise<void> {
+ const buildDirectoryPath = '**'
+ const buildArchivePath = resolve(currentDir, '../../../deployment/build.zip')
+ const functionEntryScriptPath = resolve(currentDir, '../../../deployment/snippets/index.js')
+ const ignoreArchivePaths = [
+ 'node_modules/**',
+ '.git/**',
+ '.github/**',
+ '.gitignore',
+ 'build/**'
+ ]
+ const injectArchivePaths = [
+ functionEntryScriptPath
+ ]
+ await createZip(buildDirectoryPath, injectArchivePaths, ignoreArchivePaths, buildArchivePath)
+ const functionStorageSource = await uploadSource(buildArchivePath)
+ await deployFunction('genoacms', functionStorageSource)
  }
+
+ export default deploy
@@ -1,6 +1,6 @@
  import type { Adapter } from '@genoacms/cloudabstraction/deployment'
 
- const svelteKitAdapter: Adapter.svelteKitAdapter = '@sveltejs/adapter-node'
+ const svelteKitAdapter: Adapter.svelteKitAdapter = '@genoacms/sveltekit-adapter-cloud-run-functions'
 
  const deployProcedure: Adapter.deployProcedure = async () => {
  const deploy = (await import('./deploy.js')).default
@@ -4,8 +4,9 @@ import type {
  } from '@genoacms/cloudabstraction/storage'
  import { type File } from '@google-cloud/storage'
  import { getBucket } from './storage.js'
+ import { join } from 'path'
 
- const getObject: Adapter.getObject = async ({ bucket, name }) => {
+ const getObject: Adapter['getObject'] = async ({ bucket, name }) => {
  const bucketInstance = getBucket(bucket)
  const file = bucketInstance.file(name)
 
@@ -14,17 +15,15 @@ const getObject: Adapter.getObject = async ({ bucket, name }) => {
  }
  }
 
- const getPublicURL: Adapter.getPublicURL = async ({ bucket, name }) => {
+ const getPublicURL: Adapter['getPublicURL'] = async ({ bucket, name }) => {
  const bucketInstance = getBucket(bucket)
  const file = bucketInstance.file(name)
  return file.publicUrl()
  }
 
- const getSignedURL: Adapter.getSignedURL = async ({ bucket, name }) => {
+ const getSignedURL: Adapter['getSignedURL'] = async ({ bucket, name }, expires) => {
  const bucketInstance = getBucket(bucket)
  const file = bucketInstance.file(name)
- const expires = new Date()
- expires.setTime(expires.getTime() + 60 * 60 * 1_000)
  const [url] = await file.getSignedUrl({
  action: 'read',
  expires
@@ -32,23 +31,23 @@ const getSignedURL: Adapter.getSignedURL = async ({ bucket, name }) => {
  return url
  }
 
- const uploadObject: Adapter.uploadObject = async ({ bucket, name }, stream, options) => {
+ const uploadObject: Adapter['uploadObject'] = async ({ bucket, name }, stream, options) => {
  const bucketInstance = getBucket(bucket)
  const file = bucketInstance.file(name)
  await file.save(stream, options)
  }
 
- const deleteObject: Adapter.deleteObject = async ({ bucket, name }) => {
+ const deleteObject: Adapter['deleteObject'] = async ({ bucket, name }) => {
  const bucketInstance = getBucket(bucket)
  const file = bucketInstance.file(name)
  await file.delete()
  }
 
- const listDirectory: Adapter.listDirectory = async ({ bucket, name }, listingParams = {}) => {
+ const listDirectory: Adapter['listDirectory'] = async ({ bucket, name }, listingParams = {}) => {
  const bucketInstance = getBucket(bucket)
  const options = {
  autoPaginate: false,
- prefix: name,
+ prefix: join(name, '/'),
  maxResults: listingParams?.limit,
  startOffset: listingParams?.startAfter,
  delimiter: '/'
@@ -67,11 +66,11 @@ const listDirectory: Adapter.listDirectory = async ({ bucket, name }, listingPar
  lastModified: new Date(file.metadata.updated as string)
  } satisfies StorageObject
  }),
- directories: apiResponse?.prefixes ?? []
+ directories: (apiResponse?.prefixes ?? []).filter((item) => item !== name)
  }
  }
 
- const createDirectory: Adapter.createDirectory = async ({ bucket, name }) => {
+ const createDirectory: Adapter['createDirectory'] = async ({ bucket, name }) => {
  const bucketInstance = getBucket(bucket)
  const file = bucketInstance.file(`${name}/.folderPlaceholder`)
  await file.save('')
@@ -1,6 +1,9 @@
  import config from '../../config.js'
  import { type Bucket, Storage } from '@google-cloud/storage'
+ import type { StorageProvider } from '@genoacms/cloudabstraction/storage'
 
+ const storageConfig = config.storage.providers.find((provider: StorageProvider) => provider.name === 'gcs')
+ if (!storageConfig) throw new Error('storage-provider-not-found')
  const storage = new Storage({
  credentials: config.storage.credentials
  })
@@ -1,8 +0,0 @@
- import { HttpFunction } from '@google-cloud/functions-framework'
- import app from './build'
-
- const svelteKitApp = HttpFunction(app.handler)
-
- export {
- svelteKitApp
- }