@genoacms/adapter-gcp 0.5.2-fix.1 → 0.5.2-fix.10
- package/deployment/snippets/index.js +8 -0
- package/dist/services/deployment/deploy.d.ts +2 -1
- package/dist/services/deployment/deploy.js +94 -50
- package/dist/services/storage/index.js +4 -5
- package/package.json +7 -5
- package/src/services/deployment/deploy.ts +95 -49
- package/src/services/storage/index.ts +4 -5
- package/src/services/deployment/cloudFunction.js +0 -8
package/dist/services/deployment/deploy.d.ts CHANGED
@@ -1 +1,2 @@
-
+declare function deploy(): Promise<void>;
+export default deploy;
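The compiled typings now expose deploy as a typed default export. A minimal consumer sketch (the import specifier is an assumption; the exact path depends on the package's exports map, which this diff does not show):

    // Hypothetical consumer script; adjust the specifier to the adapter's actual export map.
    import deploy from '@genoacms/adapter-gcp/dist/services/deployment/deploy.js'

    await deploy() // zips the build, uploads the archive, and creates or updates the Cloud Function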
package/dist/services/deployment/deploy.js CHANGED
@@ -1,67 +1,111 @@
 import config from '../../config.js';
-import {
-import {
-import {
-import {
-import
-const
+import { createReadStream, createWriteStream } from 'node:fs';
+import { resolve, dirname, basename } from 'node:path';
+import { fileURLToPath } from 'node:url';
+import { v2 } from '@google-cloud/functions';
+import archiver from 'archiver';
+const { FunctionServiceClient } = v2;
+const functionsClient = new FunctionServiceClient({
     credentials: config.deployment.credentials
 });
 const projectId = config.deployment.projectId;
 const region = config.deployment.region;
-
-
-
-
-const
-
-
-
-
+const currentDir = dirname(fileURLToPath(import.meta.url));
+async function createZip(source, injectPaths, ignorePaths, out) {
+    await new Promise((resolve, reject) => {
+        const output = createWriteStream(out);
+        const archive = archiver('zip', { zlib: { level: 9 } });
+        output.on('close', () => {
+            resolve();
+        });
+        archive.on('error', (err) => {
+            reject(err);
+        });
+        archive.pipe(output);
+        archive.glob(source, { ignore: ignorePaths });
+        for (const path of injectPaths) {
+            archive.file(path, { name: basename(path) });
         }
-
-
-        const gcsFile = bucket.file(destination);
-        await new Promise((resolve, reject) => {
-            fileStream
-                .pipe(gcsFile.createWriteStream())
-                .on('error', reject)
-                .on('finish', resolve);
-        });
-    }
-}
+        archive.finalize();
+    });
 }
-async function
-const
-const
-
-
+async function uploadSource(sourceArchivePath) {
+    const location = functionsClient.locationPath(projectId, region);
+    const [urlResponse] = await functionsClient.generateUploadUrl({ parent: location });
+    const uploadUrl = urlResponse.uploadUrl;
+    const storageSource = urlResponse.storageSource;
+    if (!uploadUrl || !storageSource)
+        throw new Error('Upload URL not found');
+    const sourceArchiveStream = createReadStream(sourceArchivePath);
+    await fetch(uploadUrl, {
+        method: 'PUT',
+        // @ts-expect-error: invalid typings
+        body: sourceArchiveStream,
+        duplex: 'half',
+        headers: {
+            'Content-Type': 'application/zip'
+        }
     });
-
-    return file.cloudStorageURI.toString();
+    return storageSource;
 }
-async function deployFunction(
+async function deployFunction(functionName, storageSource) {
     const location = functionsClient.locationPath(projectId, region);
-const
-
+    const name = functionsClient.functionPath(projectId, region, functionName);
+    let isFunctionExisting;
+    try {
+        await functionsClient.getFunction({ name });
+        isFunctionExisting = true;
+    }
+    catch (error) {
+        isFunctionExisting = false;
+    }
+    const operationParams = {
+        functionId: functionName,
+        parent: location,
         function: {
             name,
-
-
-
-
-
-
+            buildConfig: {
+                entryPoint: 'genoacms',
+                runtime: 'nodejs20',
+                source: {
+                    storageSource
+                }
+            },
+            serviceConfig: {
+                minInstanceCount: 0,
+                maxInstanceCount: 1,
+                ingressSettings: 1,
+                environmentVariables: {
+                    NODE_ENV: 'production'
+                }
            }
         }
-}
+    };
+    let response;
+    if (isFunctionExisting) {
+        [response] = await functionsClient.updateFunction(operationParams);
+    }
+    else {
+        [response] = await functionsClient.createFunction(operationParams);
+    }
     console.log(response);
 }
-
-const
-const
-const
-
-
-
+async function deploy() {
+    const buildDirectoryPath = '**';
+    const buildArchivePath = resolve(currentDir, '../../../deployment/build.zip');
+    const functionEntryScriptPath = resolve(currentDir, '../../../deployment/snippets/index.js');
+    const ignoreArchivePaths = [
+        'node_modules/**',
+        '.git/**',
+        '.github/**',
+        '.gitignore',
+        'build/**'
+    ];
+    const injectArchivePaths = [
+        functionEntryScriptPath
+    ];
+    await createZip(buildDirectoryPath, injectArchivePaths, ignoreArchivePaths, buildArchivePath);
+    const functionStorageSource = await uploadSource(buildArchivePath);
+    await deployFunction('genoacms', functionStorageSource);
 }
+export default deploy;
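For orientation, the new createZip helper globs the build output against an ignore list and appends each injected file at the archive root under its basename. A standalone usage sketch (all paths here are hypothetical and chosen only to illustrate the parameters; deploy() itself wires in the real ones):

    // Hypothetical invocation of the helper shown in the diff above.
    await createZip(
      '**',                                  // glob pattern selecting the files to archive
      ['./deployment/snippets/index.js'],    // files injected at the archive root (basename only)
      ['node_modules/**', 'build/**'],       // glob patterns excluded from the archive
      './build.zip'                          // output path of the resulting zip
    )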
package/dist/services/storage/index.js CHANGED
@@ -1,4 +1,5 @@
 import { getBucket } from './storage.js';
+import { join } from 'path';
 const getObject = async ({ bucket, name }) => {
     const bucketInstance = getBucket(bucket);
     const file = bucketInstance.file(name);
@@ -11,11 +12,9 @@ const getPublicURL = async ({ bucket, name }) => {
     const file = bucketInstance.file(name);
     return file.publicUrl();
 };
-const getSignedURL = async ({ bucket, name }) => {
+const getSignedURL = async ({ bucket, name }, expires) => {
     const bucketInstance = getBucket(bucket);
     const file = bucketInstance.file(name);
-    const expires = new Date();
-    expires.setTime(expires.getTime() + 60 * 60 * 1_000);
     const [url] = await file.getSignedUrl({
         action: 'read',
         expires
@@ -36,7 +35,7 @@ const listDirectory = async ({ bucket, name }, listingParams = {}) => {
     const bucketInstance = getBucket(bucket);
     const options = {
         autoPaginate: false,
-        prefix: name,
+        prefix: join(name, '/'),
         maxResults: listingParams?.limit,
         startOffset: listingParams?.startAfter,
         delimiter: '/'
@@ -52,7 +51,7 @@ const listDirectory = async ({ bucket, name }, listingParams = {}) => {
                 lastModified: new Date(file.metadata.updated)
             };
         }),
-        directories: apiResponse?.prefixes ?? []
+        directories: (apiResponse?.prefixes ?? []).filter((item) => item !== name)
     };
 };
 const createDirectory = async ({ bucket, name }) => {
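The signed-URL change moves expiry control to the caller instead of hard-coding one hour inside the adapter. A hedged usage sketch (bucket and object names are hypothetical; the one-hour window is only an example value):

    // The expires argument is forwarded to file.getSignedUrl() unchanged.
    const url = await getSignedURL(
      { bucket: 'my-bucket', name: 'uploads/report.pdf' },
      new Date(Date.now() + 60 * 60 * 1000) // caller-chosen expiry
    )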
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@genoacms/adapter-gcp",
-  "version": "0.5.2-fix.1",
+  "version": "0.5.2-fix.10",
   "description": "Implementation of abstraction layer of GenoaCMS for GCP",
   "repository": {
     "type": "git",
@@ -19,11 +19,13 @@
   "dependencies": {
     "@genoacms/cloudabstraction": "^0.5.2",
     "@google-cloud/firestore": "^7.1.0",
-    "@google-cloud/functions": "^3.
+    "@google-cloud/functions": "^3.4.0",
     "@google-cloud/resource-manager": "^5.1.0",
-    "@google-cloud/storage": "^7.7.0"
+    "@google-cloud/storage": "^7.7.0",
+    "archiver": "^7.0.0"
   },
   "devDependencies": {
+    "@types/archiver": "^6.0.2",
     "@typescript-eslint/eslint-plugin": "^6.9.0",
     "eslint": "^8.52.0",
     "eslint-config-standard-with-typescript": "^39.1.1",
@@ -35,11 +37,11 @@
     "vitest": "^1.0.4"
   },
   "peerDependencies": {
-    "@sveltejs/adapter-node": "^4.0.1"
-    "@google-cloud/functions-framework": "^3.3.0"
+    "@sveltejs/adapter-node": "^4.0.1"
   },
   "files": [
     "src",
+    "deployment",
     "dist"
   ],
   "exports": {
package/src/services/deployment/deploy.ts CHANGED
@@ -1,74 +1,120 @@
 import config from '../../config.js'
-import {
-import {
-import {
-import {
-import
+import { createReadStream, createWriteStream } from 'node:fs'
+import { resolve, dirname, basename } from 'node:path'
+import { fileURLToPath } from 'node:url'
+import { v2 } from '@google-cloud/functions'
+import archiver from 'archiver'
+import type { google } from '@google-cloud/functions/build/protos/protos.js'
+type IStorageSource = google.cloud.functions.v2.IStorageSource
 
-const
+const { FunctionServiceClient } = v2
+const functionsClient = new FunctionServiceClient({
   credentials: config.deployment.credentials
 })
 const projectId = config.deployment.projectId
 const region = config.deployment.region
 
-
-  const bucket = getBucket(bucketName)
-  const files = await readdir(directoryPath)
+const currentDir = dirname(fileURLToPath(import.meta.url))
 
-
-
-const
+async function createZip (source: string, injectPaths: string[], ignorePaths: string[], out: string): Promise<void> {
+  await new Promise<void>((resolve, reject) => {
+    const output = createWriteStream(out)
+    const archive = archiver('zip', { zlib: { level: 9 } })
 
-
-
-
-    } else {
-      const fileStream = createReadStream(filePath)
-      const gcsFile = bucket.file(destination)
+    output.on('close', () => {
+      resolve()
+    })
 
-
-
-
-
-
-
+    archive.on('error', (err) => {
+      reject(err)
+    })
+
+    archive.pipe(output)
+    archive.glob(source, { ignore: ignorePaths })
+    for (const path of injectPaths) {
+      archive.file(path, { name: basename(path) })
     }
-
+    archive.finalize()
+  })
 }
 
-async function
-const
-const
-
-
+async function uploadSource (sourceArchivePath: string): Promise<IStorageSource> {
+  const location = functionsClient.locationPath(projectId, region)
+  const [urlResponse] = await functionsClient.generateUploadUrl({ parent: location })
+  const uploadUrl = urlResponse.uploadUrl
+  const storageSource = urlResponse.storageSource
+  if (!uploadUrl || !storageSource) throw new Error('Upload URL not found')
+  const sourceArchiveStream = createReadStream(sourceArchivePath)
+  await fetch(uploadUrl, {
+    method: 'PUT',
+    // @ts-expect-error: invalid typings
+    body: sourceArchiveStream,
+    duplex: 'half',
+    headers: {
+      'Content-Type': 'application/zip'
+    }
   })
-
-  return file.cloudStorageURI.toString()
+  return storageSource
 }
 
-async function deployFunction (
+async function deployFunction (functionName: string, storageSource: IStorageSource): Promise<void> {
   const location = functionsClient.locationPath(projectId, region)
-const
-
+  const name = functionsClient.functionPath(projectId, region, functionName)
+  let isFunctionExisting: boolean
+  try {
+    await functionsClient.getFunction({ name })
+    isFunctionExisting = true
+  } catch (error) {
+    isFunctionExisting = false
+  }
+  const operationParams = {
+    functionId: functionName,
+    parent: location,
    function: {
      name,
-
-
-
-
-
-
+      buildConfig: {
+        entryPoint: 'genoacms',
+        runtime: 'nodejs20',
+        source: {
+          storageSource
+        }
+      },
+      serviceConfig: {
+        minInstanceCount: 0,
+        maxInstanceCount: 1,
+        ingressSettings: 1, // ALLOW_ALL
+        environmentVariables: {
+          NODE_ENV: 'production'
+        }
      }
    }
-}
+  }
+  let response
+  if (isFunctionExisting) {
+    [response] = await functionsClient.updateFunction(operationParams)
+  } else {
+    [response] = await functionsClient.createFunction(operationParams)
+  }
  console.log(response)
 }
 
-
-const
-const
-const
-
-
-
+async function deploy (): Promise<void> {
+  const buildDirectoryPath = '**'
+  const buildArchivePath = resolve(currentDir, '../../../deployment/build.zip')
+  const functionEntryScriptPath = resolve(currentDir, '../../../deployment/snippets/index.js')
+  const ignoreArchivePaths = [
+    'node_modules/**',
+    '.git/**',
+    '.github/**',
+    '.gitignore',
+    'build/**'
+  ]
+  const injectArchivePaths = [
+    functionEntryScriptPath
+  ]
+  await createZip(buildDirectoryPath, injectArchivePaths, ignoreArchivePaths, buildArchivePath)
+  const functionStorageSource = await uploadSource(buildArchivePath)
+  await deployFunction('genoacms', functionStorageSource)
 }
+
+export default deploy
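Note that createFunction and updateFunction on the v2 FunctionServiceClient return long-running operations, and the code above only logs the first element of that tuple. If a caller wanted to wait until the build and rollout actually finish, a sketch assuming the standard google-gax long-running-operation interface would be:

    // Sketch only: operation.promise() resolves once the Cloud Function build/rollout completes.
    const [operation] = await functionsClient.createFunction(operationParams)
    const [deployedFunction] = await operation.promise()
    console.log(deployedFunction.name)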
package/src/services/storage/index.ts CHANGED
@@ -4,6 +4,7 @@ import type {
 } from '@genoacms/cloudabstraction/storage'
 import { type File } from '@google-cloud/storage'
 import { getBucket } from './storage.js'
+import { join } from 'path'
 
 const getObject: Adapter.getObject = async ({ bucket, name }) => {
   const bucketInstance = getBucket(bucket)
@@ -20,11 +21,9 @@ const getPublicURL: Adapter.getPublicURL = async ({ bucket, name }) => {
   return file.publicUrl()
 }
 
-const getSignedURL: Adapter.getSignedURL = async ({ bucket, name }) => {
+const getSignedURL: Adapter.getSignedURL = async ({ bucket, name }, expires) => {
   const bucketInstance = getBucket(bucket)
   const file = bucketInstance.file(name)
-  const expires = new Date()
-  expires.setTime(expires.getTime() + 60 * 60 * 1_000)
   const [url] = await file.getSignedUrl({
     action: 'read',
     expires
@@ -48,7 +47,7 @@ const listDirectory: Adapter.listDirectory = async ({ bucket, name }, listingPar
   const bucketInstance = getBucket(bucket)
   const options = {
     autoPaginate: false,
-    prefix: name,
+    prefix: join(name, '/'),
     maxResults: listingParams?.limit,
     startOffset: listingParams?.startAfter,
     delimiter: '/'
@@ -67,7 +66,7 @@ const listDirectory: Adapter.listDirectory = async ({ bucket, name }, listingPar
       lastModified: new Date(file.metadata.updated as string)
     } satisfies StorageObject
   }),
-    directories: apiResponse?.prefixes ?? []
+    directories: (apiResponse?.prefixes ?? []).filter((item) => item !== name)
   }
 }
 