@es-labs/jslib 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +4 -0
- package/README.md +42 -0
- package/__test__/services.test.js +32 -0
- package/auth/index.js +226 -0
- package/auth/keyv.js +23 -0
- package/auth/knex.js +29 -0
- package/auth/redis.js +23 -0
- package/comms/email.js +123 -0
- package/comms/nexmo.js +44 -0
- package/comms/telegram.js +43 -0
- package/comms/telegram2/inbound.js +314 -0
- package/comms/telegram2/outbound.js +574 -0
- package/comms/webpush.js +60 -0
- package/config.js +37 -0
- package/express/controller/auth/oauth.js +39 -0
- package/express/controller/auth/oidc.js +87 -0
- package/express/controller/auth/own.js +100 -0
- package/express/controller/auth/saml.js +74 -0
- package/express/upload.js +48 -0
- package/index.js +1 -0
- package/iso/README.md +4 -0
- package/iso/__tests__/csv-utils.spec.js +128 -0
- package/iso/__tests__/datetime.spec.js +101 -0
- package/iso/__tests__/fetch.spec.js +270 -0
- package/iso/csv-utils.js +206 -0
- package/iso/datetime.js +103 -0
- package/iso/fetch.js +129 -0
- package/iso/fetch2.js +180 -0
- package/iso/log-filter.js +17 -0
- package/iso/sleep.js +6 -0
- package/iso/ws.js +63 -0
- package/node/oss-files/oss-uploader-client-fetch.js +258 -0
- package/node/oss-files/oss-uploader-client-fetch.md +31 -0
- package/node/oss-files/oss-uploader-client.js +219 -0
- package/node/oss-files/oss-uploader-server.js +199 -0
- package/node/oss-files/oss-uploader-usage.js +121 -0
- package/node/oss-files/oss-uploader-usage.md +34 -0
- package/node/oss-files/s3-uploader-client.js +217 -0
- package/node/oss-files/s3-uploader-server.js +123 -0
- package/node/oss-files/s3-uploader-usage.js +77 -0
- package/node/oss-files/s3-uploader-usage.md +34 -0
- package/package.json +53 -0
- package/packageInfo.js +9 -0
- package/services/ali.js +279 -0
- package/services/aws.js +194 -0
- package/services/db/__tests__/keyv.spec.js +31 -0
- package/services/db/keyv.js +14 -0
- package/services/db/knex.js +67 -0
- package/services/db/redis.js +51 -0
- package/services/index.js +57 -0
- package/services/mq/README.md +8 -0
- package/services/websocket.js +139 -0
- package/t4t/README.md +1 -0
- package/traps.js +20 -0
- package/utils/__tests__/aes.spec.js +52 -0
- package/utils/aes.js +23 -0
- package/web/UI.md +71 -0
- package/web/bwc-autocomplete.js +211 -0
- package/web/bwc-combobox.js +343 -0
- package/web/bwc-fileupload.js +87 -0
- package/web/bwc-loading-overlay.js +54 -0
- package/web/bwc-t4t-form.js +511 -0
- package/web/bwc-table.js +756 -0
- package/web/fetch.js +129 -0
- package/web/i18n.js +24 -0
- package/web/idle.js +49 -0
- package/web/parse-jwt.js +15 -0
- package/web/pwa.js +84 -0
- package/web/sign-pad.js +164 -0
- package/web/t4t-fe.js +164 -0
- package/web/util.js +126 -0
- package/web/web-cam.js +182 -0
package/services/ali.js
ADDED
|
@@ -0,0 +1,279 @@
|
|
|
1
|
+
// Aliyun OSS interface - https://github.com/ali-sdk/ali-oss
|
|
2
|
+
// suitable for files that are not large... limit to 10Mb file size
|
|
3
|
+
//
|
|
4
|
+
// res.status = 204 no content but still success
|
|
5
|
+
// res.status = 200
|
|
6
|
+
// res.statusMessage = 200
|
|
7
|
+
// TDB signatureUrlV4 & usage
|
|
8
|
+
|
|
9
|
+
import 'dotenv/config.js' // node --env-file .env
import crypto from 'node:crypto'
import OSS from 'ali-oss'
|
|
11
|
+
|
|
12
|
+
// OSS credentials, region, and bucket come from the environment (see dotenv import above).
const { OSS_AK_ID, OSS_AK_SECRET, OSS_REGION, OSS_BUCKET } = process.env

// Module-level OSS client singleton.
// NOTE(review): `store` is null when the required env vars are missing — the
// helpers below then throw inside their try/catch and report status 500.
const store = (OSS_AK_ID && OSS_AK_SECRET && OSS_REGION) ? new OSS({
  region: OSS_REGION,
  accessKeyId: OSS_AK_ID,
  accessKeySecret: OSS_AK_SECRET,
  bucket: OSS_BUCKET,
}) : null
|
|
20
|
+
|
|
21
|
+
/**
 * get count of objects in a bucket.
 * @param {string} [bucketName] - bucket to inspect (defaults to the client's configured bucket when null)
 * @returns {{ status: Number, count: Number }} - count is 0 on error
 */
const countBucketObjects = async (bucketName = null) => {
  try {
    const result = await store.getBucketStat(bucketName)
    return { status: 200, count: result?.stat?.ObjectCount }
  } catch (e) {
    // status 404, code: NoSuchBucket
    // fall back to 500 when the error carries no HTTP status (network failure,
    // unconfigured client) — previously `status` could be undefined
    return { status: e.status ?? 500, count: 0 }
  }
}
|
|
35
|
+
|
|
36
|
+
/**
|
|
37
|
+
 * An object entry as returned by OSS listV2 (see listObjects below).
|
|
38
|
+
* @typedef {Object} ListV2Object
|
|
39
|
+
* @property {string} name - e.g. arco/a5.wav
|
|
40
|
+
* @property {string} url - e.g. http://my-bucket.oss-ap-southeast-1.aliyuncs.com/test/a5.wav
|
|
41
|
+
* @property {string} lastModified - e.g. 2024-09-03T02:34:24.000Z
|
|
42
|
+
* @property {string} etag - e.g. "3A627F876FD033F4B5CB81F063F9F883"
|
|
43
|
+
* @property {string} type - e.g. Normal
|
|
44
|
+
* @property {Number} size - e.g. 948328
|
|
45
|
+
* @property {string} storageClass - e.g. Standard
|
|
46
|
+
* @property {string} owner - e.g. null
|
|
47
|
+
*/
|
|
48
|
+
|
|
49
|
+
/**
 * list objects in the bucket (listV2), optionally filtered by key prefix.
 * @param {{ prefix: string, maxKeys: Number }} - maxKeys caps the page size (default 10)
 * @returns {{ status: Number, statusMessage: string, [objects]: ListV2Object[] }}
 */
const listObjects = async ({ prefix = '', maxKeys = 10 } = {}) => {
  // console.log(prefix, maxKeys)
  try {
    const result = await store.listV2({
      prefix,
      'max-keys': maxKeys
    })
    // console.log(result.res)
    const { status, statusMessage } = result.res
    return {
      status,
      statusMessage,
      objects: result.objects
    }
  } catch (e) {
    console.log('ali - listObjects', e)
    return { status: 500, statusMessage: e.toString() }
  }
}
|
|
73
|
+
|
|
74
|
+
/**
 * put (upload) an object into the bucket; uploading to an existing key replaces it.
 * @param {string} key - the object key - e.g. test/hello.txt
 * @param {string|Buffer|ReadableStream} payload - file data
 * @returns {{ status: Number, statusMessage: string }}
 */
const putObject = async (key, payload) => {
  try {
    const { res } = await store.put(key, payload)
    return { status: res.status, statusMessage: res.statusMessage }
  } catch (err) {
    console.log('ali - putObject', err)
    return { status: 500, statusMessage: err.toString() }
  }
}
|
|
92
|
+
|
|
93
|
+
/**
 * get (download) an object's content as a Buffer.
 * @param {string} key - the object key - e.g. test/hello.txt
 * @returns {{ status: Number, statusMessage: string, [buffer]: Buffer }}
 */
const getObject = async (key) => {
  // if (!store) return null
  try {
    const result = await store.get(key)
    // console.log(typeof result.content, result.content.toString('utf-8')) // content is Buffer object
    // console.log(Buffer.isBuffer(result.content))
    // console.log(result)
    const { status, statusMessage} = result?.res || {}
    return {
      status,
      statusMessage,
      buffer: result?.content
    }
  } catch (e) {
    console.log('ali - getObject', e)
    return { status: 500, statusMessage: e.toString() }
  }
}
|
|
117
|
+
|
|
118
|
+
/**
 * delete multiple objects by key.
 * @param {string[]} keys - the object keys - e.g. ['test/hello.txt','abc/d123.txt']
 * @returns {{ status: Number, statusMessage: string, [deleted]: { Key: string }[] }}
 */
const deleteObjects = async (keys) => {
  try {
    const result = await store.deleteMulti(keys, {})
    const { status, statusMessage } = result?.res || {}
    return {
      status,
      statusMessage,
      // optional chaining for consistency with the guarded `res` access above
      deleted: result?.deleted
    }
  } catch (e) {
    console.log('ali - deleteObjects', e)
    return { status: 500, statusMessage: e.toString() }
  }
}
|
|
139
|
+
|
|
140
|
+
// signatureUrlV4 argument order: (method, expires[, request, objectName, additionalHeaders])
/**
 * get a V4-signed URL for an object. Errors propagate to the caller.
 * @param {string} method - GET or PUT
 * @param {number} expires - expiration in seconds
 * @param {string} key - the object key - e.g. test/hello.txt
 * @param {object} [headers] - optional request conditions, e.g. { headers: { 'Content-Type': ... } }
 * @param {string[]} [additional] - additional header names to sign
 * @returns {Promise<string>} the signed URL
 */
const getSignedUrl = async (method, expires, key, headers = null, additional = null) => {
  // do not log the signed URL — it is a bearer credential until it expires
  return store.signatureUrlV4(method, expires, headers, key, additional)
}
|
|
152
|
+
|
|
153
|
+
/**
 * get a signed URL to write (or read) a file in OSS.
 * For 'write', the base filename is replaced by a sha256 hash of (name + timestamp)
 * so repeat uploads never collide.
 * @param {string} directory - the directory in OSS.
 * @param {string} filename - the filename
 * @param {string} contentType - the content type of the file
 * @param {string} [action='write'] - 'write' or 'read'
 * @param {number} [expiration=7200] - the expiration time of the signed URL in seconds
 * @param {object} [callbackConfig] - OSS upload-callback config for the write action,
 *   example: { callback_url: 'https://example.com', body: { foo: 'bar' } }
 * @returns {{ url: string, error?: string }}
 */
const getUploadURL = async (directory, filename, contentType, action = 'write', expiration = 7200, callbackConfig) => {
  if (!action || !filename) {
    return { error: 'filename and action required' }
  }

  try {
    let url

    // write / new file action
    if (action === 'write') {
      const arr = filename.split('.')
      // hash the base name, salted with the current time, to avoid key collisions
      arr[0] = crypto
        .createHash('sha256')
        .update(arr[0] + new Date().getTime())
        .digest('hex')
      const newFilename = arr.join('.')
      const fullPath = directory ? `${directory}/${newFilename}` : newFilename

      const options = {
        expires: expiration,
        method: 'PUT',
        'Content-Type': contentType,
      }
      // only attach the OSS callback when a callback URL is configured —
      // previously a callback with `url: undefined` was always sent
      if (callbackConfig?.callback_url) {
        options.callback = {
          url: callbackConfig.callback_url,
          body: JSON.stringify({ ...callbackConfig?.body, filename, filepath: fullPath }),
          contentType: 'application/json'
        }
      }
      url = await store.signatureUrl(fullPath, options)
    } else {
      // read action: signed GET URL with the SDK's default expiry
      url = await store.signatureUrl(filename)
    }

    return { url }
  } catch (e) {
    return { error: e.toString() }
  }
}
|
|
203
|
+
|
|
204
|
+
// Manual smoke test for the helpers above — uncomment sections as needed.
// Not exported; invoked via the commented `test()` call below.
const test = async () => {
  // [bucket count]
  // const bucketObjCount = await countBucketObjects('no-such-bucket') // non-existing bucket, also test with existing bucket
  // console.log('bucketObjCount', bucketObjCount) // -1 if error ?

  // [put] - if put same object name will replace...
  // const testFile1 = new File(['Hello, world 1!'], 'hello.txt', { type: 'text/plain' })
  // const testData1 = await testFile1.arrayBuffer()
  // const putRes1 = await putObject('hello1.txt', Buffer.from(testData1))
  // console.log('putRes1', putRes1)

  // const testFile2 = new File(['Hello, world 2!'], 'hello.txt', { type: 'text/plain' })
  // const testData2 = await testFile2.arrayBuffer()
  // const putRes2 = await putObject('hello2.txt', Buffer.from(testData2))
  // console.log('putRes2', putRes2)

  // [list objects]
  // const listRes = await listObjects({ prefix: 'hello' })
  // console.log('listRes.objects', listRes?.objects?.length, listRes?.objects?.map(item => item.name))

  // [get object]
  // const data1 = await getObject('hello1.txt')
  // console.log('data1', data1?.buffer?.toString())
  // const data2 = await getObject('hello2.txt')
  // console.log('data2', data2?.buffer?.toString())

  // [delete objects]
  // const deleteRes = await deleteObjects(['ahello1.txt', 'ahello2.txt', 'ahello3.txt'])
  // console.log(deleteRes)

  // NOTE(review): `url` is unused — this line only exercises getSignedUrl
  const url = await getSignedUrl('GET', 60, 'hello1.txt')
  /*
  // -------------------------------------------------
  // PutObject
  const putObejctUrl = await store.signatureUrlV4('PUT', 60, undefined, 'your obejct name');
  console.log(putObejctUrl);
  // --------------------------------------------------
  const putObejctUrl = await store.signatureUrlV4(
    'PUT',
    60,
    {
      headers: {
        'Content-Type': 'text/plain',
        'Content-MD5': 'xxx',
        'Content-Length': 1
      }
    },
    'your obejct name',
    ['Content-Length']
  );
  console.log(putObejctUrl);
  */
}
|
|
257
|
+
|
|
258
|
+
// test()
|
|
259
|
+
|
|
260
|
+
// async function listBucketInventory() {
|
|
261
|
+
// const bucket = 'Your Bucket Name'
|
|
262
|
+
// let nextContinuationToken
|
|
263
|
+
// // list all inventory of the bucket
|
|
264
|
+
// do {
|
|
265
|
+
// const result = await store.listBucketInventory(bucket, nextContinuationToken)
|
|
266
|
+
// console.log(result.inventoryList)
|
|
267
|
+
// nextContinuationToken = result.nextContinuationToken
|
|
268
|
+
// } while (nextContinuationToken)
|
|
269
|
+
// }
|
|
270
|
+
// listBucketInventory()
|
|
271
|
+
export {
|
|
272
|
+
countBucketObjects,
|
|
273
|
+
listObjects,
|
|
274
|
+
putObject,
|
|
275
|
+
getObject,
|
|
276
|
+
deleteObjects,
|
|
277
|
+
getSignedUrl,
|
|
278
|
+
getUploadURL
|
|
279
|
+
}
|
package/services/aws.js
ADDED
|
@@ -0,0 +1,194 @@
|
|
|
1
|
+
// TBD not used yet
|
|
2
|
+
import {
|
|
3
|
+
S3Client,
|
|
4
|
+
S3ServiceException,
|
|
5
|
+
// This command supersedes the ListObjectsCommand and is the recommended way to list objects.
|
|
6
|
+
paginateListObjectsV2,
|
|
7
|
+
} from "@aws-sdk/client-s3";
|
|
8
|
+
|
|
9
|
+
/**
 * Log all of the object keys in a bucket, one page at a time.
 * @param {{ bucketName: string, pageSize: string }} - pageSize is parsed to an integer
 */
export const main = async ({ bucketName, pageSize }) => {
  const client = new S3Client({});
  /** @type {string[][]} */
  const objects = [];
  try {
    const paginator = paginateListObjectsV2(
      // always pass a radix to parseInt
      { client, /* Max items per page */ pageSize: Number.parseInt(pageSize, 10) },
      { Bucket: bucketName },
    );

    for await (const page of paginator) {
      // an empty bucket yields pages without Contents — guard against undefined
      objects.push((page.Contents ?? []).map((o) => o.Key));
    }
    objects.forEach((objectList, pageNum) => {
      console.log(
        `Page ${pageNum + 1}\n------\n${objectList.map((o) => `• ${o}`).join("\n")}\n`,
      );
    });
  } catch (caught) {
    if (
      caught instanceof S3ServiceException &&
      caught.name === "NoSuchBucket"
    ) {
      console.error(
        `Error from S3 while listing objects for "${bucketName}". The bucket doesn't exist.`,
      );
    } else if (caught instanceof S3ServiceException) {
      console.error(
        `Error from S3 while listing objects for "${bucketName}". ${caught.name}: ${caught.message}`,
      );
    } else {
      throw caught;
    }
  }
};
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
/*
|
|
52
|
+
|
|
53
|
+
// https://docs.aws.amazon.com/AmazonS3/latest/userguide/example_s3_Scenario_PresignedUrl_section.html
|
|
54
|
+
// https://docs.aws.amazon.com/AmazonS3/latest/userguide/mpuoverview.html
|
|
55
|
+
// https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/javascript_s3_code_examples.html#basics
|
|
56
|
+
|
|
57
|
+
import https from "https";
|
|
58
|
+
import { PutObjectCommand, S3Client } from "@aws-sdk/client-s3";
|
|
59
|
+
import { fromIni } from "@aws-sdk/credential-providers";
|
|
60
|
+
import { HttpRequest } from "@smithy/protocol-http";
|
|
61
|
+
import {
|
|
62
|
+
getSignedUrl,
|
|
63
|
+
S3RequestPresigner,
|
|
64
|
+
} from "@aws-sdk/s3-request-presigner";
|
|
65
|
+
import { parseUrl } from "@smithy/url-parser";
|
|
66
|
+
import { formatUrl } from "@aws-sdk/util-format-url";
|
|
67
|
+
import { Hash } from "@smithy/hash-node";
|
|
68
|
+
|
|
69
|
+
const createPresignedUrlWithoutClient = async ({ region, bucket, key }) => {
|
|
70
|
+
const url = parseUrl(`https://${bucket}.s3.${region}.amazonaws.com/${key}`);
|
|
71
|
+
const presigner = new S3RequestPresigner({
|
|
72
|
+
credentials: fromIni(),
|
|
73
|
+
region,
|
|
74
|
+
sha256: Hash.bind(null, "sha256"),
|
|
75
|
+
});
|
|
76
|
+
|
|
77
|
+
const signedUrlObject = await presigner.presign(
|
|
78
|
+
new HttpRequest({ ...url, method: "PUT" }),
|
|
79
|
+
);
|
|
80
|
+
return formatUrl(signedUrlObject);
|
|
81
|
+
};
|
|
82
|
+
|
|
83
|
+
const createPresignedUrlWithClient = ({ region, bucket, key }) => {
|
|
84
|
+
const client = new S3Client({ region });
|
|
85
|
+
const command = new PutObjectCommand({ Bucket: bucket, Key: key });
|
|
86
|
+
return getSignedUrl(client, command, { expiresIn: 3600 });
|
|
87
|
+
};
|
|
88
|
+
|
|
89
|
+
function put(url, data) {
|
|
90
|
+
return new Promise((resolve, reject) => {
|
|
91
|
+
const req = https.request(
|
|
92
|
+
url,
|
|
93
|
+
{ method: "PUT", headers: { "Content-Length": new Blob([data]).size } },
|
|
94
|
+
(res) => {
|
|
95
|
+
let responseBody = "";
|
|
96
|
+
res.on("data", (chunk) => {
|
|
97
|
+
responseBody += chunk;
|
|
98
|
+
});
|
|
99
|
+
res.on("end", () => {
|
|
100
|
+
resolve(responseBody);
|
|
101
|
+
});
|
|
102
|
+
},
|
|
103
|
+
);
|
|
104
|
+
req.on("error", (err) => {
|
|
105
|
+
reject(err);
|
|
106
|
+
});
|
|
107
|
+
req.write(data);
|
|
108
|
+
req.end();
|
|
109
|
+
});
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
export const main = async () => {
|
|
113
|
+
const REGION = "us-east-1";
|
|
114
|
+
const BUCKET = "example_bucket";
|
|
115
|
+
const KEY = "example_file.txt";
|
|
116
|
+
|
|
117
|
+
// There are two ways to generate a presigned URL.
|
|
118
|
+
// 1. Use createPresignedUrl without the S3 client.
|
|
119
|
+
// 2. Use getSignedUrl in conjunction with the S3 client and GetObjectCommand.
|
|
120
|
+
try {
|
|
121
|
+
const noClientUrl = await createPresignedUrlWithoutClient({
|
|
122
|
+
region: REGION,
|
|
123
|
+
bucket: BUCKET,
|
|
124
|
+
key: KEY,
|
|
125
|
+
});
|
|
126
|
+
|
|
127
|
+
const clientUrl = await createPresignedUrlWithClient({
|
|
128
|
+
region: REGION,
|
|
129
|
+
bucket: BUCKET,
|
|
130
|
+
key: KEY,
|
|
131
|
+
});
|
|
132
|
+
|
|
133
|
+
// After you get the presigned URL, you can provide your own file
|
|
134
|
+
// data. Refer to put() above.
|
|
135
|
+
console.log("Calling PUT using presigned URL without client");
|
|
136
|
+
await put(noClientUrl, "Hello World");
|
|
137
|
+
|
|
138
|
+
console.log("Calling PUT using presigned URL with client");
|
|
139
|
+
await put(clientUrl, "Hello World");
|
|
140
|
+
|
|
141
|
+
console.log("\nDone. Check your S3 console.");
|
|
142
|
+
} catch (err) {
|
|
143
|
+
console.error(err);
|
|
144
|
+
}
|
|
145
|
+
};
|
|
146
|
+
*/
|
|
147
|
+
|
|
148
|
+
/*
|
|
149
|
+
import {
|
|
150
|
+
DeleteObjectCommand,
|
|
151
|
+
S3Client,
|
|
152
|
+
S3ServiceException,
|
|
153
|
+
waitUntilObjectNotExists,
|
|
154
|
+
} from "@aws-sdk/client-s3";
|
|
155
|
+
|
|
156
|
+
// Delete one object from an Amazon S3 bucket.
|
|
157
|
+
// @param {{ bucketName: string, key: string }}
|
|
158
|
+
export const main = async ({ bucketName, key }) => {
|
|
159
|
+
const client = new S3Client({});
|
|
160
|
+
|
|
161
|
+
try {
|
|
162
|
+
await client.send(
|
|
163
|
+
new DeleteObjectCommand({
|
|
164
|
+
Bucket: bucketName,
|
|
165
|
+
Key: key,
|
|
166
|
+
}),
|
|
167
|
+
);
|
|
168
|
+
await waitUntilObjectNotExists(
|
|
169
|
+
{ client },
|
|
170
|
+
{ Bucket: bucketName, Key: key },
|
|
171
|
+
);
|
|
172
|
+
// A successful delete, or a delete for a non-existent object, both return
|
|
173
|
+
// a 204 response code.
|
|
174
|
+
console.log(
|
|
175
|
+
`The object "${key}" from bucket "${bucketName}" was deleted, or it didn't exist.`,
|
|
176
|
+
);
|
|
177
|
+
} catch (caught) {
|
|
178
|
+
if (
|
|
179
|
+
caught instanceof S3ServiceException &&
|
|
180
|
+
caught.name === "NoSuchBucket"
|
|
181
|
+
) {
|
|
182
|
+
console.error(
|
|
183
|
+
`Error from S3 while deleting object from ${bucketName}. The bucket doesn't exist.`,
|
|
184
|
+
);
|
|
185
|
+
} else if (caught instanceof S3ServiceException) {
|
|
186
|
+
console.error(
|
|
187
|
+
`Error from S3 while deleting object from ${bucketName}. ${caught.name}: ${caught.message}`,
|
|
188
|
+
);
|
|
189
|
+
} else {
|
|
190
|
+
throw caught;
|
|
191
|
+
}
|
|
192
|
+
}
|
|
193
|
+
};
|
|
194
|
+
*/
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
// node:test spec for the keyv wrapper.
// NOTE: keyv.js is an ES module (`export default`), so this spec must use ESM
// imports — `require('../keyv')` fails in an ESM package (and even with
// require(esm) support it returns the module namespace, not the class).
import assert from 'node:assert/strict';
import { describe, it, beforeEach, afterEach } from 'node:test';

import StoreKeyV from '../keyv.js';

describe('StoreKeyV (keyv wrapper)', () => {
  let store;

  beforeEach(() => {
    store = new StoreKeyV();
    store.open();
  });

  afterEach(() => {
    store.close();
  });

  it('should open a keyv instance and allow set/get', async () => {
    const kv = store.get();
    assert.ok(kv, 'keyv instance should be available');

    await kv.set('test-key', 'value');
    const got = await kv.get('test-key');
    assert.strictEqual(got, 'value');
  });

  it('should clear internal reference on close', () => {
    store.close();
    assert.strictEqual(store.get(), null);
  });
});
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import { Keyv } from 'keyv'
|
|
2
|
+
|
|
3
|
+
/**
 * Thin lifecycle wrapper around a Keyv instance.
 * Options default to the KEYV_CACHE env var parsed as JSON, falling back to {}.
 */
export default class StoreKeyV {
  constructor(options = JSON.parse(process.env.KEYV_CACHE || null) || {}) {
    this._KEYV_CACHE = options
    this._keyv = null
  }
  // create the Keyv instance; connection errors are logged, not thrown
  open () {
    this._keyv = this._KEYV_CACHE ? new Keyv(this._KEYV_CACHE) : new Keyv()
    this._keyv.on('error', err => console.error('keyv Connection Error', err))
  }
  // returns the underlying Keyv instance, or null when not open
  get () { return this._keyv }
  // disconnect the backing store (if any) before dropping the reference —
  // previously the reference was cleared without releasing the connection.
  // The reference is nulled synchronously so `get()` is null immediately.
  async close () {
    const kv = this._keyv
    this._keyv = null
    if (kv) await kv.disconnect?.()
  }
}
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
import Knex from 'knex'
|
|
2
|
+
|
|
3
|
+
/**
 * Lifecycle wrapper around a Knex connection pool.
 * Options default to the KNEXFILE env var parsed as JSON, falling back to {}.
 */
export default class StoreKnex {
  constructor(options = JSON.parse(process.env.KNEXFILE || null) || {}) {
    this._KNEXFILE = options
    this._knex = null
  }

  // create the pool and verify connectivity with a trivial query
  async open() {
    if (!this._KNEXFILE) {
      console.log('KNEXFILE property empty or undefined - knex not started')
      return
    }
    try {
      this._knex = Knex(this._KNEXFILE)
      try {
        // sqlite, may need to use another statement with other sql dbs
        await this._knex.raw('select 1+1 as result')
        console.log('knex CONNECTED')
      } catch (err) {
        console.log('DB error: ' + err.toString())
      }
    } catch (e) {
      console.log('knex CONNECT ERROR', e.toString())
    }
  }

  // returns the underlying knex instance, or null when not open
  get () { return this._knex }

  // tear down the pool; logs unconditionally, as before
  async close () {
    if (this._knex) await this._knex.destroy()
    console.log('knex closed')
  }
}
|
|
29
|
+
|
|
30
|
+
// NOSONAR
|
|
31
|
+
// Model.knex().destroy(() => {}) // returns a promise
|
|
32
|
+
// Update with your config settings.
|
|
33
|
+
// Mysql 8 issue for now
|
|
34
|
+
// ALTER USER 'user'@'%' IDENTIFIED WITH mysql_native_password BY 'user123!@#PK';
|
|
35
|
+
// FLUSH PRIVILEGES;
|
|
36
|
+
// npx knex migrate:make create_users --env development
|
|
37
|
+
// npx knex migrate:latest --env development
|
|
38
|
+
// npx knex seed:make seed_name --env development
|
|
39
|
+
// npx knex seed:run --env development
|
|
40
|
+
// migrations
|
|
41
|
+
//
|
|
42
|
+
// exports.up = function (knex, Promise) {
|
|
43
|
+
// return Promise.all([
|
|
44
|
+
// knex.schema.createTable('ideas', table => {
|
|
45
|
+
// table.increments('id').primary()
|
|
46
|
+
// table.string('idea')
|
|
47
|
+
// table.string('creator')
|
|
48
|
+
// })
|
|
49
|
+
// ])
|
|
50
|
+
// }
|
|
51
|
+
// exports.down = function (knex, Promise) {
|
|
52
|
+
// return Promise.all([
|
|
53
|
+
// knex.schema.dropTable('ideas')
|
|
54
|
+
// ])
|
|
55
|
+
// }
|
|
56
|
+
// seeds
|
|
57
|
+
// exports.seed = function (knex, Promise) {
|
|
58
|
+
// return knex('ideas').del().then(() => {
|
|
59
|
+
// return knex('ideas').insert([
|
|
60
|
+
// {creator: 'Ali', idea: 'A To Do List app!'},
|
|
61
|
+
// {creator: 'Ali', idea: 'A Blog!'},
|
|
62
|
+
// {creator: 'Ali', idea: 'A calculator'}
|
|
63
|
+
// ])
|
|
64
|
+
// })
|
|
65
|
+
// }
|
|
66
|
+
|
|
67
|
+
// https://dev.to/aspittel/objection--knex--painless-postgresql-in-your-node-app--6n6
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
// {
|
|
2
|
+
// port: 6379,
|
|
3
|
+
// host: '127.0.0.1',
|
|
4
|
+
// family: 4, // 4 (IPv4) or 6 (IPv6)
|
|
5
|
+
// password: 'auth',
|
|
6
|
+
// db: 0,
|
|
7
|
+
// // if using sentinels
|
|
8
|
+
// // sentinels: [{ host: 'localhost', port: 26379 }, { host: 'localhost', port: 26380 }],
|
|
9
|
+
// // name: 'mymaster',
|
|
10
|
+
// }
|
|
11
|
+
// var availableSlaves = [{ ip: '127.0.0.1', port: '31231', flags: 'slave' }]
|
|
12
|
+
// var preferredSlaves = [ { ip: '127.0.0.1', port: '31231', prio: 1 }, { ip: '127.0.0.1', port: '31232', prio: 2 } ]
|
|
13
|
+
// // preferredSlaves function format
|
|
14
|
+
// preferredSlaves = function(availableSlaves) {
|
|
15
|
+
// for (var i = 0; i < availableSlaves.length; i++) {
|
|
16
|
+
// var slave = availableSlaves[i]
|
|
17
|
+
// if (slave.ip === '127.0.0.1' && slave.port === '31234') return slave
|
|
18
|
+
// }
|
|
19
|
+
// // if no preferred slaves are available a random one is used
|
|
20
|
+
// return false
|
|
21
|
+
// }
|
|
22
|
+
// var redis = new Redis({
|
|
23
|
+
// sentinels: [{ host: '127.0.0.1', port: 26379 }, { host: '127.0.0.1', port: 26380 }],
|
|
24
|
+
// name: 'mymaster',
|
|
25
|
+
// role: 'slave',
|
|
26
|
+
// preferredSlaves: preferredSlaves
|
|
27
|
+
// })
|
|
28
|
+
|
|
29
|
+
import Redis from 'ioredis'
|
|
30
|
+
|
|
31
|
+
/**
 * Lifecycle wrapper around an ioredis connection.
 * Options default to the REDIS_CONFIG env var parsed as JSON, falling back to {}.
 * Expected shape: { opts: {...ioredis options}, retry?: { step, max }, reconnect?: { targetError } }
 */
export default class StoreRedis {
  constructor(options = JSON.parse(process.env.REDIS_CONFIG || null) || {}) {
    this._REDIS_CONFIG = options
    this._redis = null
  }

  open () {
    // default to an empty options object — previously a config with `retry` or
    // `reconnect` but no `opts` crashed assigning properties onto undefined
    const redisOpts = this._REDIS_CONFIG.opts || {}
    if (this._REDIS_CONFIG.retry) redisOpts.retryStrategy = (times) => Math.min(times * this._REDIS_CONFIG.retry.step, this._REDIS_CONFIG.retry.max)
    if (this._REDIS_CONFIG.reconnect) redisOpts.reconnectOnError = (err) => err.message.includes(this._REDIS_CONFIG.reconnect.targetError)
    this._redis = new Redis(redisOpts)
  }

  // returns the underlying ioredis client, or null when not open
  get () { return this._redis }

  // drop the connection immediately (no graceful quit) and clear the reference
  close () {
    if (this._redis) {
      this._redis.disconnect()
      this._redis = null
    }
  }
}
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
import StoreKeyV from './db/keyv.js'
|
|
2
|
+
import StoreKnex from './db/knex.js'
|
|
3
|
+
import StoreRedis from './db/redis.js'
|
|
4
|
+
import Wss from './websocket.js'
|
|
5
|
+
// import auth from '../auth/index.js'
|
|
6
|
+
import '../auth/index.js'
|
|
7
|
+
|
|
8
|
+
let servicesConfig = []
|
|
9
|
+
const services = {}
|
|
10
|
+
|
|
11
|
+
/**
 * Instantiate and open the configured services (knex / redis / keyv / ws).
 * @param {Array<{name: string, type: string, options: string}>} [config] - defaults to
 *   the SERVICES_CONFIG env var (JSON); each entry's `options` names another env var
 *   holding that service's JSON options
 * @param {object} [server] - HTTP server, passed to websocket services
 * @param {object} [app] - express app, passed to websocket services
 */
const start = async (
  config = JSON.parse(process.env.SERVICES_CONFIG || null) || [], server = null, app = null
) => {
  // NOTE(review): the previous unused read of SERVICES_TYPES_AVAILABLE was
  // removed — it threw a TypeError whenever that env var was unset.
  // map service type -> constructor; entries with an unknown type are skipped
  const serviceClasses = { knex: StoreKnex, redis: StoreRedis, keyv: StoreKeyV, ws: Wss }
  try {
    servicesConfig = config
    servicesConfig.forEach(svc => {
      const opts = JSON.parse(process.env[svc.options] || null)
      const ServiceClass = serviceClasses[svc.type]
      // skip silently when options are missing or the type is unknown —
      // previously an unknown type crashed `.open()` and aborted the remaining services
      if (!opts || !ServiceClass) return
      services[svc.name] = new ServiceClass(opts)
      if (svc.type === 'ws') {
        services[svc.name].open(server, app) // set server or get app object
      } else {
        services[svc.name].open()
      }
    })
  } catch (e) {
    console.log(e)
  }
}
|
|
36
|
+
|
|
37
|
+
/**
 * Close all configured services; individual failures are settled, not thrown.
 */
const stop = async () => {
  try {
    // optional chaining: a service may never have been instantiated
    // (missing options or unknown type in start())
    const promises = servicesConfig.map(svc => services[svc.name]?.close())
    await Promise.allSettled(promises)
  } catch (e) {
    console.log(e.toString())
  }
}
|
|
47
|
+
|
|
48
|
+
// resolve the named service's underlying client, or null when absent/unopened
const get = (service) => {
  const instance = services[service]
  return (instance && instance.get()) || null
}
|
|
49
|
+
|
|
50
|
+
const list = () => servicesConfig;
|
|
51
|
+
|
|
52
|
+
export {
|
|
53
|
+
start,
|
|
54
|
+
stop,
|
|
55
|
+
get,
|
|
56
|
+
list
|
|
57
|
+
}
|