@flowfuse/file-server 1.14.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +67 -0
- package/LICENSE +178 -0
- package/README.md +231 -0
- package/etc/flowforge-storage.local.yml +22 -0
- package/etc/flowforge-storage.yml +26 -0
- package/flowforge-file-server-1.14.0.tgz +0 -0
- package/forge/auth.js +106 -0
- package/forge/config.js +93 -0
- package/forge/context-driver/quotaTools.js +53 -0
- package/forge/context-driver/sequelize.js +401 -0
- package/forge/driver.js +19 -0
- package/forge/drivers/localfs.js +142 -0
- package/forge/drivers/memory.js +72 -0
- package/forge/drivers/s3.js +143 -0
- package/forge/drivers/vfs.js +33 -0
- package/forge/fileServer.js +99 -0
- package/forge/routes/context.js +186 -0
- package/forge/routes/files.js +130 -0
- package/forge/routes/index.js +18 -0
- package/forge/routes/quota.js +9 -0
- package/index.js +63 -0
- package/package.json +63 -0
|
@@ -0,0 +1,142 @@
|
|
|
1
|
+
const fs = require('fs')
|
|
2
|
+
const { join, isAbsolute, dirname, sep: pathSeparator, parse } = require('path')
|
|
3
|
+
|
|
4
|
+
const canary = 'ROOT_DIR_CANARY'
|
|
5
|
+
|
|
6
|
+
/**
 * Recursively compute the total size, in bytes, of all regular files under `dir`.
 *
 * Uses the promise-based fs API throughout — the original mixed blocking
 * `readdirSync`/`statSync` calls into an async map, which stalled the event
 * loop and defeated the purpose of returning sizes via `Promise.all`.
 * The previous `.flat(Infinity)` was also dead code: each recursive call
 * already returns a number, never an array.
 *
 * @param {string} dir - directory to measure
 * @returns {Promise<number>} total bytes; 0 if the directory does not exist
 */
async function readDirSize (dir) {
    let entries
    try {
        entries = await fs.promises.readdir(dir, { withFileTypes: true })
    } catch (error) {
        // Missing directory counts as empty, matching the old existsSync guard
        if (error.code === 'ENOENT') {
            return 0
        }
        throw error
    }
    const sizes = await Promise.all(entries.map(async entry => {
        const entryPath = join(dir, entry.name)
        if (entry.isDirectory()) {
            return readDirSize(entryPath)
        }
        if (entry.isFile()) {
            const { size } = await fs.promises.stat(entryPath)
            return size
        }
        // sockets, FIFOs, symlinks etc. contribute nothing
        return 0
    }))
    return sizes.reduce((total, size) => total + size, 0)
}
|
|
24
|
+
|
|
25
|
+
/**
 * Rewrite well-known filesystem errors so the message exposes only the
 * client-visible path (not the server's absolute path).
 *
 * @param {Error} error - original error from the fs layer
 * @param {string} path - client-supplied path to show in the message
 * @param {string} [op] - operation name to embed; defaults per error code
 * @returns {Error} a sanitised Error for ENOENT/EPERM, otherwise `error` unchanged
 */
function cleanError (error, path, op) {
    // Message builders keyed by the error codes we sanitise
    const templates = {
        ENOENT: (operation) => `ENOENT: no such file or directory, ${operation || 'stat'} '${path}'`,
        EPERM: (operation) => `EPERM: operation not permitted, ${operation || 'unlink'} '${path}'`
    }
    const buildMessage = templates[error.code]
    if (!buildMessage) {
        // Unknown codes pass through untouched
        return error
    }
    const cleaned = new Error(buildMessage(op))
    cleaned.code = error.code
    return cleaned
}
|
|
37
|
+
|
|
38
|
+
/**
 * Local filesystem storage driver. All files live under a single root
 * directory, organised as <root>/<teamId>/<projectId>/<path>.
 *
 * @param {object} app - fastify instance carrying config and logger
 * @returns {object} driver API: rootPath, resolvePath, ensureDir, save,
 *          append, read, delete, quota
 */
module.exports = function (app) {
    // Root directory for all stored files; a relative configured root is
    // resolved against the platform home directory
    let rootPath
    if (!isAbsolute(app.config.driver.options.root)) {
        rootPath = join(app.config.home, app.config.driver.options?.root)
    } else {
        rootPath = app.config.driver.options.root
    }

    // Create the root directory on first start
    if (!fs.existsSync(rootPath)) {
        fs.mkdirSync(rootPath, { recursive: true })
    }

    app.log.info(`FlowForge File Storage localfs root Directory: ${rootPath}`)

    /**
     * Map a user-supplied path to a rootPath-relative path, constrained to
     * the team's area. A sacrificial 'canary' segment is prepended so a path
     * that climbs out via '..' no longer shares the expected prefix after
     * join() normalisation and is rejected.
     *
     * @param {string} teamId
     * @param {string} projectId
     * @param {string} path - user-supplied path (relative or absolute)
     * @returns {string} path relative to rootPath
     * @throws Error with code 'ENOTDIR' when the path escapes the sandbox
     */
    function resolvePath (teamId, projectId, path) {
        let resolvedPath
        if (isAbsolute(path)) {
            const pp = parse(path)
            // NOTE(review): the character class contains a literal '|'
            // ([a-z|A-Z]) — presumably intended as [a-zA-Z]; it also matches
            // '|:' as a drive. Harmless for real drive letters; confirm.
            const hasWinDrive = /[a-z|A-Z]:/.test(pp.root)
            if (hasWinDrive) {
                // Turn 'C:...' into 'C...' so the drive letter becomes a
                // plain directory segment rather than a filesystem root
                path = path.replace(':', '')
            }
        }
        const minPath = join(canary, teamId, projectId, path) // limit traversal to /teamid/projectId/**
        if (path.startsWith(pathSeparator)) {
            // Absolute paths are joined without projectId; they only pass the
            // prefix check below if they normalise back under minPath
            resolvedPath = join(canary, teamId, path)
        } else {
            resolvedPath = join(canary, teamId, projectId, path)
        }
        if (resolvedPath.startsWith(minPath)) {
            // Strip the canary segment before returning
            const array = resolvedPath.split(pathSeparator)
            array.shift()
            resolvedPath = array.join(pathSeparator)
            return resolvedPath
        } else {
            const err = new Error('Invalid Path')
            err.code = 'ENOTDIR'
            throw err
        }
    }

    return {
        // Absolute directory all team files live under
        get rootPath () {
            return rootPath
        },
        resolvePath,
        // Validates that the directory path is legal; directories are created
        // lazily by save(), so no mkdir happens here
        async ensureDir (teamId, projectId, path) {
            try {
                resolvePath(teamId, projectId, path)
                return true
            } catch (error) {
                throw cleanError(error, path, 'stat')
            }
        },
        // Write (or overwrite) a file, creating parent directories as needed
        async save (teamId, projectId, path, data) {
            const fullPath = join(rootPath, resolvePath(teamId, projectId, path))
            try {
                fs.mkdirSync(dirname(fullPath), {
                    recursive: true
                })
                fs.writeFileSync(fullPath, data)
            } catch (error) {
                // cleanError hides the server-side absolute path from clients
                throw cleanError(error, path, 'open')
            }
        },

        // Append to an existing file, or fall back to save() when it does
        // not exist yet
        async append (teamId, projectId, path, data) {
            const fullPath = join(rootPath, resolvePath(teamId, projectId, path))
            if (fs.existsSync(fullPath)) {
                try {
                    fs.mkdirSync(dirname(fullPath), {
                        recursive: true
                    })
                    fs.appendFileSync(fullPath, data)
                } catch (error) {
                    throw cleanError(error, path, 'open')
                }
            } else {
                await this.save(teamId, projectId, path, data)
            }
        },

        // Return the file contents (Buffer from readFileSync)
        async read (teamId, projectId, path) {
            const fullPath = join(rootPath, resolvePath(teamId, projectId, path))
            try {
                return fs.readFileSync(fullPath)
            } catch (error) {
                throw cleanError(error, path, 'open')
            }
        },

        // Remove a single file
        async delete (teamId, projectId, path) {
            const fullPath = join(rootPath, resolvePath(teamId, projectId, path))
            try {
                fs.rmSync(fullPath)
            } catch (error) {
                throw cleanError(error, path, 'unlink')
            }
        },

        // Total bytes currently stored for the project
        async quota (teamId, projectId) {
            return readDirSize(join(rootPath, teamId, projectId))
        }
    }
}
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
const { join, sep: pathSeparator } = require('path')
|
|
2
|
+
|
|
3
|
+
const canary = 'ROOT_DIR_CANARY'
|
|
4
|
+
const storage = {}
|
|
5
|
+
|
|
6
|
+
/**
 * Map a user-supplied path to a storage key constrained to the team's area.
 * A sacrificial canary segment is prepended so a path that climbs out of
 * /<teamId>/<projectId>/ via '..' no longer shares the expected prefix after
 * join() normalisation and is rejected.
 *
 * @param {string} teamId
 * @param {string} projectId
 * @param {string} path - user-supplied path (relative or absolute)
 * @returns {string} key relative to the storage root
 * @throws Error with code 'ENOTDIR' when the path escapes the sandbox
 */
function resolvePath (teamId, projectId, path) {
    const canary = 'ROOT_DIR_CANARY' // sacrificial root segment, stripped below
    const minPath = join(canary, teamId, projectId, path) // limit traversal to /teamid/projectId/**
    const candidate = path.startsWith(pathSeparator)
        ? join(canary, teamId, path)
        : join(canary, teamId, projectId, path)
    if (!candidate.startsWith(minPath)) {
        const err = new Error('Invalid Path')
        err.code = 'ENOTDIR'
        throw err
    }
    // Drop the canary segment and return the team-relative key
    const segments = candidate.split(pathSeparator)
    segments.shift()
    return segments.join(pathSeparator)
}
|
|
25
|
+
|
|
26
|
+
module.exports = function (app) {
|
|
27
|
+
return {
|
|
28
|
+
get rootPath () {
|
|
29
|
+
return ''
|
|
30
|
+
},
|
|
31
|
+
resolvePath,
|
|
32
|
+
|
|
33
|
+
async ensureDir (teamId, projectId, path) {
|
|
34
|
+
try {
|
|
35
|
+
resolvePath(teamId, projectId, path)
|
|
36
|
+
return true
|
|
37
|
+
} catch (error) {
|
|
38
|
+
return false
|
|
39
|
+
}
|
|
40
|
+
},
|
|
41
|
+
|
|
42
|
+
async save (teamId, projectId, path, data) {
|
|
43
|
+
storage[resolvePath(teamId, projectId, path)] = data
|
|
44
|
+
},
|
|
45
|
+
|
|
46
|
+
async append (teamId, projectId, path, data) {
|
|
47
|
+
if (storage[resolvePath(teamId, projectId, path)]) {
|
|
48
|
+
storage[resolvePath(teamId, projectId, path)] += data
|
|
49
|
+
} else {
|
|
50
|
+
storage[resolvePath(teamId, projectId, path)] = data
|
|
51
|
+
}
|
|
52
|
+
},
|
|
53
|
+
|
|
54
|
+
async read (teamId, projectId, path) {
|
|
55
|
+
return storage[resolvePath(teamId, projectId, path)]
|
|
56
|
+
},
|
|
57
|
+
|
|
58
|
+
async delete (teamId, projectId, path) {
|
|
59
|
+
delete storage[resolvePath(teamId, projectId, path)]
|
|
60
|
+
},
|
|
61
|
+
|
|
62
|
+
async quota (teamId, projectId) {
|
|
63
|
+
let used = 0
|
|
64
|
+
Object.keys(storage).forEach(key => {
|
|
65
|
+
if (key.startsWith(join(teamId, projectId))) {
|
|
66
|
+
used += storage[key].length
|
|
67
|
+
}
|
|
68
|
+
})
|
|
69
|
+
return used
|
|
70
|
+
}
|
|
71
|
+
}
|
|
72
|
+
}
|
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
const canary = 'ROOT_DIR_CANARY'
|
|
2
|
+
const { S3Client, PutObjectCommand, GetObjectCommand, DeleteObjectCommand, ListObjectsCommand, HeadObjectCommand } = require('@aws-sdk/client-s3')
|
|
3
|
+
const { join, isAbsolute, sep: pathSeparator, parse } = require('path')
|
|
4
|
+
|
|
5
|
+
/**
 * Map a user-supplied path to a safe S3 object key constrained to
 * `<teamId>/<projectId>/`. A sacrificial canary segment is prepended so a
 * path that climbs out via '..' no longer shares the expected prefix after
 * join() normalisation and is rejected.
 *
 * @param {string} teamId
 * @param {string} projectId
 * @param {string} path - user-supplied path (relative or absolute)
 * @returns {string} team-relative object key
 * @throws Error with code 'ENOTDIR' when the path escapes the sandbox
 */
function resolvePath (teamId, projectId, path) {
    const canary = 'ROOT_DIR_CANARY' // sacrificial root segment, stripped below
    if (isAbsolute(path)) {
        const pp = parse(path)
        // Strip the colon from a Windows drive letter ('C:...') so the drive
        // becomes a plain path segment instead of a filesystem root.
        // Fix: the character class previously contained a literal '|'
        // (/[a-z|A-Z]:/), which also wrongly matched '|:' as a drive.
        if (/[a-zA-Z]:/.test(pp.root)) {
            path = path.replace(':', '')
        }
    }
    const minPath = join(canary, teamId, projectId, path) // limit traversal to /teamid/projectId/**
    let resolvedPath
    if (path.startsWith(pathSeparator)) {
        // Absolute paths are joined without projectId; they only pass the
        // prefix check below if they normalise back under minPath
        resolvedPath = join(canary, teamId, path)
    } else {
        resolvedPath = join(canary, teamId, projectId, path)
    }
    if (!resolvedPath.startsWith(minPath)) {
        const err = new Error('Invalid Path')
        err.code = 'ENOTDIR'
        throw err
    }
    // Drop the canary segment and return the team-relative key
    const segments = resolvedPath.split(pathSeparator)
    segments.shift()
    return segments.join(pathSeparator)
}
|
|
31
|
+
|
|
32
|
+
/**
 * S3 storage driver. Objects are stored under keys of the form
 * <teamId>/<projectId>/<path> in a single configured bucket.
 *
 * @param {object} app - fastify instance carrying driver config
 * @returns {object} driver API: rootPath, resolvePath, ensureDir, save,
 *          append, read, delete, quota
 */
module.exports = function (app) {
    // NOTE(review): `bucket` is removed from the shared options object before
    // it is handed to S3Client as connection options — this mutates
    // app.config.driver.options in place, so the driver cannot safely be
    // constructed twice from the same config.
    const options = app.config.driver.options
    const bucketID = options.bucket
    delete options.bucket
    const s3 = new S3Client(options)

    return {
        // S3 keys are bucket-relative, so there is no local root directory
        get rootPath () {
            return ''
        },
        resolvePath,
        // S3 has no real directories; this issues a (result-ignored) list
        // call to validate the path and always reports true on success
        async ensureDir (teamId, projectId, path) {
            const resolvedPath = resolvePath(teamId, projectId, path)
            await s3.send(new ListObjectsCommand({
                Bucket: bucketID,
                Prefix: resolvedPath
            }))
            return true
        },
        // Write (or overwrite) the object at the resolved key
        async save (teamId, projectId, path, data) {
            const resolvedPath = resolvePath(teamId, projectId, path)
            await s3.send(new PutObjectCommand({
                Bucket: bucketID,
                Key: resolvedPath,
                Body: data
            }))
        },
        // Append by downloading the existing object, concatenating in memory
        // and re-uploading. Behaves like save() when the object is missing.
        async append (teamId, projectId, path, data) {
            const resolvedPath = resolvePath(teamId, projectId, path)

            try {
                // HEAD first to find out whether the object already exists
                await s3.send(new HeadObjectCommand({
                    Bucket: bucketID,
                    Key: resolvedPath
                }))
                // file exists so load it and append
                try {
                    const file = await s3.send(new GetObjectCommand({
                        Bucket: bucketID,
                        Key: resolvedPath
                    }))
                    const stream = file.Body
                    // Drain the response stream into a single Buffer
                    const body = await (new Promise((resolve, reject) => {
                        const chunks = []
                        stream.on('data', chunk => chunks.push(chunk))
                        stream.once('end', () => resolve(Buffer.concat(chunks)))
                        stream.once('error', reject)
                    }))
                    // I HATE this as it's basically double the file size in memory
                    const newBody = Buffer.concat([body, data])
                    await this.save(teamId, projectId, path, newBody)
                } catch (err) {
                    // NOTE(review): read() below checks `err.Code` but this
                    // checks `err.type` — one of the two is likely wrong for
                    // the AWS SDK v3 error shape (which exposes `name`);
                    // confirm against the SDK docs.
                    if (err.type === 'NoSuchKey') {
                        const error = new Error(`ENOENT: no such file or directory, open '${path}'`)
                        error.code = 'ENOENT'
                        throw error
                    } else {
                        throw err
                    }
                }
            } catch (err) {
                // NOTE(review): this outer catch also swallows any error
                // rethrown by the inner block (including the ENOENT above)
                // and falls back to writing just the new data — confirm this
                // overwrite-on-error behaviour is intended.
                await this.save(teamId, projectId, path, data)
            }
        },
        // Fetch the object and return its contents as a Buffer
        async read (teamId, projectId, path) {
            const resolvedPath = resolvePath(teamId, projectId, path)
            try {
                const file = await s3.send(new GetObjectCommand({
                    Bucket: bucketID,
                    Key: resolvedPath
                }))
                const stream = file.Body
                // Drain the response stream into a single Buffer
                return new Promise((resolve, reject) => {
                    const chunks = []
                    stream.on('data', chunk => chunks.push(chunk))
                    stream.once('end', () => resolve(Buffer.concat(chunks)))
                    stream.once('error', reject)
                })
            } catch (err) {
                // Translate a missing key into the fs-style ENOENT shape the
                // clients expect
                if (err.Code === 'NoSuchKey') {
                    const error = new Error(`ENOENT: no such file or directory, open '${path}'`)
                    error.code = 'ENOENT'
                    throw error
                } else {
                    throw err
                }
            }
        },
        // Delete a single object
        async delete (teamId, projectId, path) {
            const resolvedPath = resolvePath(teamId, projectId, path)
            await s3.send(new DeleteObjectCommand({
                Bucket: bucketID,
                Key: resolvedPath
            }))
        },
        // Sum the sizes of all objects under <teamId>/<projectId>
        // NOTE(review): ListObjects returns a bounded first page and this does
        // not paginate, so quota may be under-reported for large projects —
        // confirm against the SDK docs.
        async quota (teamId, projectId) {
            const objects = await s3.send(new ListObjectsCommand({
                Bucket: bucketID,
                Prefix: join(teamId, projectId)
            }))

            let size = 0
            if (objects.Contents) {
                objects.Contents.forEach(file => {
                    size += file.Size
                })
            }

            return size
        }
    }
}
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
module.exports = function (app, theDriver, teamId, projectId) {
|
|
2
|
+
return {
|
|
3
|
+
get rootPath () {
|
|
4
|
+
return theDriver.rootPath
|
|
5
|
+
},
|
|
6
|
+
|
|
7
|
+
resolvePath: theDriver.resolvePath,
|
|
8
|
+
|
|
9
|
+
async ensureDir (dirName) {
|
|
10
|
+
return await theDriver.ensureDir(teamId, projectId, dirName)
|
|
11
|
+
},
|
|
12
|
+
|
|
13
|
+
async save (path, data) {
|
|
14
|
+
return await theDriver.save(teamId, projectId, path, data)
|
|
15
|
+
},
|
|
16
|
+
|
|
17
|
+
async append (path, data) {
|
|
18
|
+
return await theDriver.append(teamId, projectId, path, data)
|
|
19
|
+
},
|
|
20
|
+
|
|
21
|
+
async read (path) {
|
|
22
|
+
return await theDriver.read(teamId, projectId, path)
|
|
23
|
+
},
|
|
24
|
+
|
|
25
|
+
async delete (path) {
|
|
26
|
+
return await theDriver.delete(teamId, projectId, path)
|
|
27
|
+
},
|
|
28
|
+
|
|
29
|
+
async quota () {
|
|
30
|
+
return await theDriver.quota(teamId, projectId)
|
|
31
|
+
}
|
|
32
|
+
}
|
|
33
|
+
}
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
const fastify = require('fastify')
|
|
2
|
+
const auth = require('./auth')
|
|
3
|
+
const config = require('./config')
|
|
4
|
+
const driver = require('./driver')
|
|
5
|
+
const routes = require('./routes')
|
|
6
|
+
const helmet = require('@fastify/helmet')
|
|
7
|
+
|
|
8
|
+
module.exports = async (options = {}) => {
|
|
9
|
+
const runtimeConfig = config.init(options)
|
|
10
|
+
const loggerConfig = {
|
|
11
|
+
formatters: {
|
|
12
|
+
level: (label) => {
|
|
13
|
+
return { level: label.toUpperCase() }
|
|
14
|
+
},
|
|
15
|
+
bindings: (bindings) => {
|
|
16
|
+
return { }
|
|
17
|
+
}
|
|
18
|
+
},
|
|
19
|
+
timestamp: require('pino').stdTimeFunctions.isoTime,
|
|
20
|
+
level: runtimeConfig.logging.level,
|
|
21
|
+
serializers: {
|
|
22
|
+
res (reply) {
|
|
23
|
+
return {
|
|
24
|
+
statusCode: reply.statusCode,
|
|
25
|
+
request: {
|
|
26
|
+
url: reply.request?.raw?.url,
|
|
27
|
+
method: reply.request?.method,
|
|
28
|
+
remoteAddress: reply.request?.ip,
|
|
29
|
+
remotePort: reply.request?.socket.remotePort
|
|
30
|
+
}
|
|
31
|
+
}
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
if (runtimeConfig.logging.pretty !== false) {
|
|
36
|
+
loggerConfig.transport = {
|
|
37
|
+
target: 'pino-pretty',
|
|
38
|
+
options: {
|
|
39
|
+
translateTime: "UTC:yyyy-mm-dd'T'HH:MM:ss.l'Z'",
|
|
40
|
+
ignore: 'pid,hostname',
|
|
41
|
+
singleLine: true
|
|
42
|
+
}
|
|
43
|
+
}
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
const server = fastify({
|
|
47
|
+
bodyLimit: 10 * 1024 * 1024, // Limit set to 10MB,
|
|
48
|
+
maxParamLength: 500,
|
|
49
|
+
trustProxy: true,
|
|
50
|
+
logger: loggerConfig
|
|
51
|
+
})
|
|
52
|
+
|
|
53
|
+
if (runtimeConfig.telemetry?.backend?.prometheus?.enabled) {
|
|
54
|
+
const metricsPlugin = require('fastify-metrics')
|
|
55
|
+
await server.register(metricsPlugin, { endpoint: '/metrics' })
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
server.addHook('onError', async (request, reply, error) => {
|
|
59
|
+
// Useful for debugging when a route goes wrong
|
|
60
|
+
// console.log(error.stack)
|
|
61
|
+
})
|
|
62
|
+
|
|
63
|
+
try {
|
|
64
|
+
// Config
|
|
65
|
+
await server.register(config.attach, options)
|
|
66
|
+
|
|
67
|
+
// // Setup DB
|
|
68
|
+
// await server.register(db, {})
|
|
69
|
+
|
|
70
|
+
// // Setup settings
|
|
71
|
+
// await server.register(settings, {})
|
|
72
|
+
|
|
73
|
+
// Authentication Handler
|
|
74
|
+
await server.register(auth, {})
|
|
75
|
+
|
|
76
|
+
// HTTP Server setup
|
|
77
|
+
await server.register(helmet, {
|
|
78
|
+
global: true,
|
|
79
|
+
hidePoweredBy: true,
|
|
80
|
+
hsts: false,
|
|
81
|
+
frameguard: {
|
|
82
|
+
action: 'deny'
|
|
83
|
+
}
|
|
84
|
+
})
|
|
85
|
+
|
|
86
|
+
// Driver
|
|
87
|
+
await server.register(driver, {})
|
|
88
|
+
|
|
89
|
+
// Routes
|
|
90
|
+
await server.register(routes, { logLevel: server.config.logging.http })
|
|
91
|
+
|
|
92
|
+
server.ready()
|
|
93
|
+
|
|
94
|
+
return server
|
|
95
|
+
} catch (err) {
|
|
96
|
+
server.log.error(`Failed to start: ${err.toString()}`)
|
|
97
|
+
throw err
|
|
98
|
+
}
|
|
99
|
+
}
|
|
@@ -0,0 +1,186 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Persistent Context API
|
|
3
|
+
*
|
|
4
|
+
* @namespace context
|
|
5
|
+
* @memberof forge.fileserver
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
/** @typedef {import('fastify')} Fastify */
|
|
9
|
+
/** @typedef {import('fastify').FastifyReply} FastifyReply */
|
|
10
|
+
/** @typedef {import('fastify').FastifyRequest} FastifyRequest */
|
|
11
|
+
|
|
12
|
+
module.exports = async function (app, opts, done) {
    // Load the configured context driver implementation
    // (resolved as ../context-driver/<type>, e.g. 'memory' or 'sequelize')
    const driver = require(`../context-driver/${app.config.context.type}`)

    await driver.init(app)

    /**
     * Create/Update key
     *
     * Body is an array of { key, value } entries to set within the scope.
     *
     * @name /v1/context/:projectId/:scope
     * @static
     * @memberof forge.fileserver.context
     */
    app.post('/:projectId/:scope', {
        schema: {
            body: {
                type: 'array',
                items: {
                    type: 'object',
                    properties: {
                        key: { type: 'string' },
                        value: {}
                    }
                }
            }
        }
    }, async (request, reply) => {
        const body = request.body
        const projectId = request.params.projectId
        const scope = request.params.scope
        try {
            // 4th arg false: incremental update (the cache route passes true)
            // — presumably a replace-whole-scope flag; confirm against driver
            await driver.set(projectId, scope, body, false, request.quota?.context)
            reply.code(200).send({})
        } catch (error) {
            // Quota breaches map to 413 Payload Too Large; everything else
            // keeps the error's own status or falls back to 400
            let statusCode = error.statusCode || 400
            if (error.code === 'over_quota') {
                statusCode = 413
            }
            reply.code(statusCode).send({ error: error.message, code: error.code, limit: error.limit })
        }
    })

    /**
     * Write full scope value from cache
     *
     * Body is a plain { key: value } object holding the whole scope.
     *
     * @name /v1/context/:projectId/cache/:scope
     * @static
     * @memberof forge.fileserver.context
     */
    app.post('/:projectId/cache/:scope', {
        schema: {
            body: {
                type: 'object'
            }
        }
    }, async (request, reply) => {
        const body = request.body || {}
        // Convert the { key: value } object into the [{ key, value }] array
        // shape driver.set expects
        const data = []
        for (const key in body) {
            data.push({ key, value: body[key] })
        }
        const projectId = request.params.projectId
        const scope = request.params.scope
        try {
            await driver.set(projectId, scope, data, true, request.quota?.context)
            reply.code(200).send({})
        } catch (error) {
            let statusCode = error.statusCode || 400
            if (error.code === 'over_quota') {
                statusCode = 413
            }
            reply.code(statusCode).send({ error: error.message, code: error.code, limit: error.limit })
        }
    })

    /**
     * Get key
     *
     * `key` may be given multiple times in the query string to fetch
     * several keys at once.
     *
     * @name /v1/context/:projectId/:scope?
     * @static
     * @memberof forge.fileserver.context
     */
    app.get('/:projectId/:scope', {
        schema: {
            query: {
                type: 'object',
                properties: {
                    key: {
                        type: 'array',
                        items: { type: 'string' }
                    }
                },
                required: ['key']
            }
        }
    }, async (request, reply) => {
        const keys = request.query.key
        const projectId = request.params.projectId
        try {
            reply.send(await driver.get(projectId, request.params.scope, keys))
        } catch (error) {
            reply.code(400).send(error)
        }
    })

    /**
     * Get all context
     * An endpoint to handle the request to get context
     * for loading the cache
     *
     * NOTE(review): this shares its URL shape with GET /:projectId/:scope
     * above — presumably the router prefers the static 'cache' segment over
     * the :scope parameter; confirm against fastify's routing rules.
     *
     * @name /v1/context/:projectId/cache
     * @static
     * @memberof forge.fileserver.context
     */
    app.get('/:projectId/cache', {

    }, async (request, reply) => {
        const projectId = request.params.projectId
        // Paginated: defaults to 30 entries per page
        const paginationOptions = app.getPaginationOptions(request, { limit: 30 })
        try {
            reply.send(await driver.getAll(projectId, paginationOptions))
        } catch (error) {
            reply.code(400).send(error)
        }
    })

    /**
     * Get keys
     *
     * Lists the key names present in a scope.
     *
     * @name /v1/context/:projectId/:scope/keys
     * @static
     * @memberof forge.fileserver.context
     */
    app.get('/:projectId/:scope/keys', {

    }, async (request, reply) => {
        const projectId = request.params.projectId
        try {
            reply.send(await driver.keys(projectId, request.params.scope))
        } catch (error) {
            reply.code(400).send(error)
        }
    })

    /**
     * Delete scope
     *
     * Removes a whole scope and all keys within it.
     *
     * @name /v1/context/:projectId/:scope
     * @static
     * @memberof forge.fileserver.context
     */
    app.delete('/:projectId/:scope', {

    }, async (request, reply) => {
        // delete store[request.params.projectId][request.params.store]
        const projectId = request.params.projectId
        await driver.delete(projectId, request.params.scope)
        reply.send()
    })

    /**
     * Clean up
     *
     * Body lists the scopes still in use; the driver drops everything else.
     *
     * @name /v1/context/:projectId
     * @static
     * @memberof forge.fileserver.context
     */
    app.post('/:projectId/clean', {

    }, async (request, reply) => {
        await driver.clean(request.params.projectId, request.body)
        reply.send()
    })

    done()
}
|