@flowfuse/file-server 1.14.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,93 @@
+ const fs = require('fs')
+ const fp = require('fastify-plugin')
+ const path = require('path')
+ const YAML = require('yaml')
+
+ let config = {}
+
+ module.exports = {
+     init: (opts) => {
+         if (opts.config) {
+             // A custom config has been passed in. This means we're running
+             // programmatically rather than manually. At this stage, that
+             // means it's our test framework.
+             process.env.NODE_ENV = 'development'
+             process.env.FLOWFORGE_HOME = process.cwd()
+         } else if (!process.env.FLOWFORGE_HOME) {
+             if (process.env.NODE_ENV === 'development') {
+                 process.env.FLOWFORGE_HOME = path.resolve(__dirname, '..')
+             } else {
+                 process.env.FLOWFORGE_HOME = process.cwd()
+                 if (fs.existsSync('/opt/flowforge-file-storage')) {
+                     process.env.FLOWFORGE_HOME = '/opt/flowforge-file-storage'
+                 } else {
+                     process.env.FLOWFORGE_HOME = process.cwd()
+                 }
+             }
+         }
+
+         let ffVersion
+         if (process.env.npm_package_version) {
+             ffVersion = process.env.npm_package_version
+             // npm start
+         } else {
+             // everything else
+             const { version } = require(path.join(module.parent.path, '..', 'package.json'))
+             ffVersion = version
+         }
+         try {
+             fs.statSync(path.join(__dirname, '..', '..', '.git'))
+             ffVersion += '-git'
+         } catch (err) {
+             // No git directory
+         }
+
+         if (opts.config !== undefined) {
+             // Programmatically provided config - eg tests
+             config = { ...opts.config }
+         } else {
+             let configFile = path.join(process.env.FLOWFORGE_HOME, '/etc/flowforge-storage.yml')
+             if (fs.existsSync(path.join(process.env.FLOWFORGE_HOME, '/etc/flowforge-storage.local.yml'))) {
+                 configFile = path.join(process.env.FLOWFORGE_HOME, '/etc/flowforge-storage.local.yml')
+             }
+             try {
+                 const configFileContent = fs.readFileSync(configFile, 'utf-8')
+                 config = YAML.parse(configFileContent)
+                 config.configFile = configFile
+             } catch (err) {
+                 throw new Error(`Failed to read config file ${configFile}: ${err}`)
+             }
+         }
+
+         config.version = ffVersion
+         config.home = process.env.FLOWFORGE_HOME
+         config.port = process.env.PORT || config.port || 3001
+         config.host = config.host || 'localhost'
+
+         config.version = ffVersion
+         const defaultLogging = {
+             level: 'info',
+             http: 'warn',
+             pretty: process.env.NODE_ENV === 'development'
+         }
+         config.logging = { ...defaultLogging, ...config.logging }
+
+         return config
+     },
+     attach: fp(async function (app, opts, next) {
+         Object.freeze(config)
+         app.decorate('config', config)
+
+         if (process.env.NODE_ENV === 'development') {
+             app.log.info('Development mode')
+         }
+         app.log.info(`FlowFuse File Storage v${config.version}`)
+         app.log.info(`FlowFuse File Storage running with NodeJS ${process.version}`)
+         app.log.info(`FlowFuse File Storage HOME Directory: ${process.env.FLOWFORGE_HOME}`)
+         if (!opts.config) {
+             app.log.info(`Config File: ${config.configFile}`)
+         }
+
+         next()
+     })
+ }
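
For orientation: `init()` above reads `$FLOWFORGE_HOME/etc/flowforge-storage.yml` (or a `.local.yml` override) unless a config object is passed in programmatically, which is the path the test framework uses. The sketch below shows a plausible programmatic call built only from the keys the modules in this diff actually read (`host`, `port`, `logging`, `driver.type`, `context.options`, `context.quota`); the concrete values and the require path are illustrative assumptions, not taken from the package.

// Hypothetical usage sketch - not part of the package; values and the
// relative require path are assumptions.
const config = require('./config')

const settings = config.init({
    config: {
        host: '0.0.0.0',              // falls back to 'localhost' if omitted
        port: 3002,                   // process.env.PORT, if set, still takes precedence; default is 3001
        logging: { level: 'debug' },  // merged over the defaultLogging values above
        driver: { type: 'localfs' },  // read by the driver plugin later in this diff (value assumed)
        context: {
            options: { type: 'sqlite', storage: 'context.db' }, // read by the Sequelize context driver
            quota: 1048576            // optional per-project byte quota used by the context driver
        }
    }
})
console.log(settings.version, settings.home, settings.port)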
@@ -0,0 +1,53 @@
+ const util = require('@node-red/util').util
+
+ /**
+  * Gets a property of an object.
+  *
+  * Given the object:
+  *
+  *     {
+  *         "pet": {
+  *             "type": "cat"
+  *         }
+  *     }
+  *
+  * - `pet.type` will return `"cat"`
+  * - `pet.name` will return `undefined`
+  * - `pet.properties.this.that` will return `undefined`
+  * - `car` will return `undefined`
+  * - `car.type` will return `undefined`
+  *
+  * @param {Object} object - the object
+  * @param {String} path - the property expression
+  * @return {any} the object property, or undefined if it does not exist
+  */
+ function getObjectProperty (object, path) {
+     const msgPropParts = util.normalisePropertyExpression(path, object)
+     return msgPropParts.reduce((obj, key) =>
+         (obj && typeof obj[key] !== 'undefined') ? obj[key] : undefined, object)
+ }
+
+ /**
+  * Gets the size of an object.
+  * @param {Object} blob - the object
+  * @return {Number} the size of the object
+  */
+ function getItemSize (blob) {
+     let size = 0
+     if (blob === null) {
+         return 1
+     } else if (typeof blob === 'undefined') {
+         return 0
+     }
+     if (typeof blob === 'string') {
+         size = blob.length
+     } else {
+         size = size + JSON.stringify(blob).length
+     }
+     return size
+ }
+
+ module.exports = {
+     getObjectProperty,
+     getItemSize
+ }
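
These helpers underpin the quota accounting in the Sequelize context driver that follows. As a rough illustration of their behaviour (the require path is assumed for this sketch):

// Illustration only - the require path is an assumption.
const { getObjectProperty, getItemSize } = require('./quotaTools')

getObjectProperty({ pet: { type: 'cat' } }, 'pet.type') // => 'cat'
getObjectProperty({ pet: { type: 'cat' } }, 'pet.name') // => undefined

getItemSize(null)      // => 1
getItemSize(undefined) // => 0
getItemSize('hello')   // => 5 (string length)
getItemSize({ a: 1 })  // => 7 (length of JSON.stringify output, '{"a":1}')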
@@ -0,0 +1,401 @@
+ const { Sequelize, DataTypes, where, fn, col, Op } = require('sequelize')
+ const { getObjectProperty, getItemSize } = require('./quotaTools')
+ const util = require('@node-red/util').util
+ const path = require('path')
+
+ let sequelize, app
+
+ module.exports = {
+     init: async function (_app) {
+         app = _app
+         const dbOptions = {
+             dialect: app.config.context.options.type || 'sqlite',
+             logging: !!app.config.context.options.logging
+         }
+
+         if (dbOptions.dialect === 'sqlite') {
+             let filename = app.config.context.options.storage || 'context.db'
+             if (filename !== ':memory:') {
+                 if (!path.isAbsolute(filename)) {
+                     filename = path.join(app.config.home, 'var', filename)
+                 }
+                 dbOptions.storage = filename
+                 dbOptions.retry = {
+                     match: [
+                         /SQLITE_BUSY/
+                     ],
+                     name: 'query',
+                     max: 10
+                 }
+                 dbOptions.pool = {
+                     maxactive: 1,
+                     max: 5,
+                     min: 0,
+                     idle: 2000
+                 }
+             }
+         } else if (dbOptions.dialect === 'postgres') {
+             dbOptions.host = app.config.context.options.host || 'postgres'
+             dbOptions.port = app.config.context.options.port || 5432
+             dbOptions.username = app.config.context.options.username
+             dbOptions.password = app.config.context.options.password
+             dbOptions.database = app.config.context.options.database || 'ff-context'
+         }
+
+         sequelize = new Sequelize(dbOptions)
+
+         app.log.info(`FlowForge File Server Sequelize Context connected to ${dbOptions.dialect} on ${dbOptions.host || dbOptions.storage}`)
+
+         const Context = sequelize.define('Context', {
+             project: { type: DataTypes.STRING, allowNull: false, unique: 'context-project-scope-unique' },
+             scope: { type: DataTypes.STRING, allowNull: false, unique: 'context-project-scope-unique' },
+             values: { type: DataTypes.JSON, allowNull: false }
+         })
+         await sequelize.sync()
+         this.Context = Context
+     },
+     /**
+      * Set the context data for a given scope
+      * @param {string} projectId - The project id
+      * @param {string} scope - The context scope to write to
+      * @param {[{key:string, value:any}]} input - The context data to write
+      * @param {boolean} [overwrite=false] - If true, any existing context data will be overwritten (e.g. for a cache dump). If false, the new data will be merged with the existing data.
+      * @param {number} quotaOverride - if set, overrides the locally configured quota limit
+      */
+     set: async function (projectId, scope, input, overwrite = false, quotaOverride = 0) {
+         const { path } = parseScope(scope)
+         await sequelize.transaction({
+             type: Sequelize.Transaction.TYPES.IMMEDIATE
+         },
+         async (t) => {
+             // get the existing row of context data from the database (if any)
+             let existingRow = await this.Context.findOne({
+                 where: {
+                     project: projectId,
+                     scope: path
+                 },
+                 lock: t.LOCK.UPDATE,
+                 transaction: t
+             })
+             const quotaLimit = quotaOverride || app.config?.context?.quota || 0
+             // if quota is set, check if we are over quota or will be after this update
+             if (quotaLimit > 0) {
+                 // Difficulties implementing this correctly:
+                 // - The final size of the data can only be determined after it is stored,
+                 //   because some keys may be deleted and some may be added, and the size
+                 //   of the data is not the same as the size of the keys.
+                 // This implementation is not ideal, but it is a good approximation and will
+                 // prevent the possibility of runaway storage usage.
+                 let changeSize = 0
+                 let hasValues = false
+                 // if we are overwriting, then we need to remove the existing size to get the final size
+                 if (existingRow) {
+                     if (overwrite) {
+                         changeSize -= getItemSize(existingRow.values || '')
+                     } else {
+                         hasValues = existingRow?.values && Object.keys(existingRow.values).length > 0
+                     }
+                 }
+                 // calculate the change in size
+                 for (const element of input) {
+                     const currentItem = hasValues ? getObjectProperty(existingRow.values, element.key) : undefined
+                     if (currentItem === undefined && element.value !== undefined) {
+                         // this is an addition
+                         changeSize += getItemSize(element.value)
+                     } else if (currentItem !== undefined && element.value === undefined) {
+                         // this is a deletion
+                         changeSize -= getItemSize(currentItem)
+                     } else {
+                         // this is an update
+                         changeSize -= getItemSize(currentItem)
+                         changeSize += getItemSize(element.value)
+                     }
+                 }
+                 // only calculate the current size if we are going to need it
+                 if (changeSize >= 0) {
+                     const currentSize = await this.quota(projectId)
+                     if (currentSize + changeSize > quotaLimit) {
+                         const err = new Error('Over Quota')
+                         err.code = 'over_quota'
+                         err.error = err.message
+                         err.limit = quotaLimit
+                         throw err
+                     }
+                 }
+             }
+
+             // if we are overwriting, then we need to reset the values in the existing row (if any)
+             if (existingRow && overwrite) {
+                 existingRow.values = {} // reset the values since this is a mem cache -> DB dump
+             }
+
+             // if there is no input, then we are probably deleting the row
+             if (input?.length > 0) {
+                 if (!existingRow) {
+                     existingRow = await this.Context.create({
+                         project: projectId,
+                         scope: path,
+                         values: {}
+                     },
+                     {
+                         transaction: t
+                     })
+                 }
+                 for (const i in input) {
+                     const path = input[i].key
+                     const value = input[i].value
+                     util.setMessageProperty(existingRow.values, path, value)
+                 }
+             }
+             if (existingRow) {
+                 if (existingRow.values && Object.keys(existingRow.values).length === 0) {
+                     await existingRow.destroy({ transaction: t })
+                 } else {
+                     existingRow.changed('values', true)
+                     await existingRow.save({ transaction: t })
+                 }
+             }
+         })
+     },
+     /**
+      * Get the context data for a given scope
+      * @param {string} projectId - The project id
+      * @param {string} scope - The context scope to read from
+      * @param {[string]} keys - The context keys to read
+      * @returns {[{key:string, value?:any}]} - The context data
+      */
+     get: async function (projectId, scope, keys) {
+         const { path } = parseScope(scope)
+         const row = await this.Context.findOne({
+             attributes: ['values'],
+             where: {
+                 project: projectId,
+                 scope: path
+             }
+         })
+         const values = []
+         if (row) {
+             const data = row.values
+             keys.forEach(key => {
+                 try {
+                     const value = util.getObjectProperty(data, key)
+                     values.push({
+                         key,
+                         value
+                     })
+                 } catch (err) {
+                     if (err.code === 'INVALID_EXPR') {
+                         throw err
+                     }
+                     values.push({
+                         key
+                     })
+                 }
+             })
+         }
+         return values
+     },
+     /**
+      * Get all context values for a project
+      * @param {string} projectId The project id
+      * @param {object} pagination The pagination settings
+      * @param {number} pagination.limit The maximum number of rows to return
+      * @param {string} pagination.cursor The cursor to start from
+      * @returns {[{scope: string, values: object}]}
+      */
+     getAll: async function (projectId, pagination = {}) {
+         const where = { project: projectId }
+         const limit = parseInt(pagination.limit) || 1000
+         const rows = await this.Context.findAll({
+             attributes: ['id', 'scope', 'values'],
+             where: buildPaginationSearchClause(pagination, where),
+             order: [['id', 'ASC']],
+             limit
+         })
+         const count = await this.Context.count({ where })
+         const data = rows?.map(row => {
+             const dataRow = { scope: row.dataValues.scope, values: row.dataValues.values }
+             const { scope } = parseScope(dataRow.scope)
+             dataRow.scope = scope
+             return dataRow
+         })
+         return {
+             meta: {
+                 next_cursor: rows.length === limit ? rows[rows.length - 1].id : undefined
+             },
+             count,
+             data
+         }
+     },
+     keys: async function (projectId, scope) {
+         const { path } = parseScope(scope)
+         const row = await this.Context.findOne({
+             attributes: ['values'],
+             where: {
+                 project: projectId,
+                 scope: path
+             }
+         })
+         if (row) {
+             return Object.keys(row.values)
+         } else {
+             return []
+         }
+     },
+     delete: async function (projectId, scope) {
+         const { path } = parseScope(scope)
+         const existing = await this.Context.findOne({
+             where: {
+                 project: projectId,
+                 scope: path
+             }
+         })
+         if (existing) {
+             await existing.destroy()
+         }
+     },
+     clean: async function (projectId, activeIds) {
+         activeIds = activeIds || []
+         const scopesResults = await this.Context.findAll({
+             where: {
+                 project: projectId
+             }
+         })
+         const scopes = scopesResults.map(s => s.scope)
+         if (scopes.includes('global')) {
+             scopes.splice(scopes.indexOf('global'), 1)
+         }
+         if (scopes.length === 0) {
+             return
+         }
+         const keepFlows = []
+         const keepNodes = []
+         for (const id of activeIds) {
+             for (const scope of scopes) {
+                 if (scope.startsWith(`${id}.flow`)) {
+                     keepFlows.push(scope)
+                 } else if (scope.endsWith(`.nodes.${id}`)) {
+                     keepNodes.push(scope)
+                 }
+             }
+         }
+
+         for (const scope of scopes) {
+             if (keepFlows.includes(scope) || keepNodes.includes(scope)) {
+                 continue
+             } else {
+                 const r = await this.Context.findOne({
+                     where: {
+                         project: projectId,
+                         scope
+                     }
+                 })
+                 r && await r.destroy()
+             }
+         }
+     },
+     quota: async function (projectId) {
+         const scopesResults = await this.Context.findAll({
+             where: {
+                 project: projectId
+             }
+         })
+         let size = 0
+         scopesResults.forEach(scope => {
+             const strValues = JSON.stringify(scope.values)
+             size += strValues.length
+         })
+         return size
+     }
+ }
+
+ /**
+  * Parse a scope string into its parts
+  * @param {String} scope the scope to parse, passed in from node-red or the database
+  */
+ function parseScope (scope) {
+     let type, path
+     let flow = null
+     let node = null
+     if (scope === 'global') {
+         type = 'global'
+         path = 'global'
+     } else if (scope.indexOf('.nodes.') > -1) {
+         // node context (db scope format <flowId>.nodes.<nodeId>)
+         const parts = scope.split('.nodes.')
+         type = 'node'
+         flow = '' + parts[0]
+         node = '' + parts[1]
+         scope = `${node}:${flow}`
+         path = scope
+     } else if (scope.endsWith('.flow')) {
+         // flow context (db scope format <flowId>.flow)
+         path = scope
+         flow = scope.replace('.flow', '')
+         scope = flow
+         type = 'flow'
+     } else if (scope.indexOf(':') > -1) {
+         // node context (node-red scope format <nodeId>:<flowId>)
+         const parts = scope.split(':')
+         type = 'node'
+         flow = '' + parts[1]
+         node = '' + parts[0]
+         path = `${flow}.nodes.${node}`
+     } else {
+         // flow context
+         type = 'flow'
+         path = `${scope}.flow`
+     }
+     return { type, scope, path, flow, node }
+ }
+
+ /**
+  * Generate a properly formed where-object for sequelize findAll, that applies
+  * the required pagination, search and filter logic
+  *
+  * @param {Object} params the pagination options - cursor, query, limit
+  * @param {Object} whereClause any pre-existing where-query clauses to include
+  * @param {Array<String>} columns an array of column names to search
+  * @returns a `where` object that can be passed to a sequelize query
+  */
+ function buildPaginationSearchClause (params, whereClause = {}, columns = [], filterMap = {}) {
+     whereClause = { ...whereClause }
+     if (params.cursor) {
+         whereClause.id = { [Op.gt]: params.cursor }
+     }
+     whereClause = {
+         [Op.and]: [
+             whereClause
+         ]
+     }
+
+     for (const [key, value] of Object.entries(filterMap)) {
+         if (Object.hasOwn(params, key)) {
+             // A filter has been provided for this key
+             let clauseContainer = whereClause[Op.and]
+             let param = params[key]
+             if (Array.isArray(param)) {
+                 if (param.length > 1) {
+                     clauseContainer = []
+                     whereClause[Op.and].push({ [Op.or]: clauseContainer })
+                 }
+             } else {
+                 param = [param]
+             }
+             param.forEach(p => {
+                 clauseContainer.push(where(fn('lower', col(value)), p.toLowerCase()))
+             })
+         }
+     }
+     if (params.query && columns.length) {
+         const searchTerm = `%${params.query.toLowerCase()}%`
+         const searchClauses = columns.map(colName => {
+             return where(fn('lower', col(colName)), { [Op.like]: searchTerm })
+         })
+         const query = {
+             [Op.or]: searchClauses
+         }
+         whereClause[Op.and].push(query)
+     }
+     return whereClause
+ }
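
The module-private `parseScope()` above is the bridge between Node-RED's scope notation and the keys stored in the `scope` column; `set()`, `get()`, `keys()` and `delete()` all store and query using the returned `path`. For illustration, with made-up ids it produces the following mappings:

// Behaviour of parseScope() with made-up flow/node ids; the function is
// module-private and is called here purely for illustration.
parseScope('global')      // => { type: 'global', scope: 'global', path: 'global', flow: null, node: null }
parseScope('f1')          // => { type: 'flow', scope: 'f1', path: 'f1.flow', flow: null, node: null }
parseScope('f1.flow')     // => { type: 'flow', scope: 'f1', path: 'f1.flow', flow: 'f1', node: null }
parseScope('n1:f1')       // => { type: 'node', scope: 'n1:f1', path: 'f1.nodes.n1', flow: 'f1', node: 'n1' }
parseScope('f1.nodes.n1') // => { type: 'node', scope: 'n1:f1', path: 'n1:f1', flow: 'f1', node: 'n1' }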
@@ -0,0 +1,19 @@
+ const fp = require('fastify-plugin')
+ const getDriver = require('./drivers/vfs.js')
+
+ module.exports = fp(async function (app, opts, done) {
+     const Driver = require('./drivers/' + app.config.driver.type)
+     try {
+         app.decorate('_driver', new Driver(app))
+         app.decorateRequest('vfs', null)
+         app.addHook('onRequest', (req, reply, done) => {
+             const teamId = req.params.teamId
+             const projectId = req.params.projectId
+             req.vfs = getDriver(app, app._driver, teamId, projectId)
+             done()
+         })
+     } catch (err) {
+         console.log(err)
+     }
+     done()
+ })
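
Taken together, the config module, the context driver and this plugin are registered against a Fastify instance. The wiring below is a hedged sketch rather than the package's actual entry point: the file paths, the example route and the exact registration order are assumptions based only on what the code in this diff decorates and reads.

// Hedged wiring sketch - file paths and the example route are assumptions.
const fastify = require('fastify')
const config = require('./config')

async function main () {
    const settings = config.init({})                   // or init({ config: {...} }) for tests
    const app = fastify({ logger: { level: settings.logging.level } })
    await app.register(config.attach)                  // decorates the (frozen) app.config
    await app.register(require('./plugin'))            // the plugin above: sets req.vfs per request
    app.get('/v1/files/:teamId/:projectId/*', async (request, reply) => {
        // request.vfs is already scoped to teamId/projectId by the onRequest hook above
        return { ok: true }
    })
    await app.listen({ port: settings.port, host: settings.host })
}

main().catch(err => { console.error(err); process.exit(1) })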