s3db.js 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. package/.github/workflows/pipeline.yml +16 -0
  2. package/README.md +742 -0
  3. package/build/cache/avro.serializer.js +16 -0
  4. package/build/cache/json.serializer.js +7 -0
  5. package/build/cache/s3-cache.class.js +157 -0
  6. package/build/cache/s3-resource-cache.class.js +77 -0
  7. package/build/cache/serializers.type.js +8 -0
  8. package/build/errors.js +64 -0
  9. package/build/index.js +9 -0
  10. package/build/metadata.interface.js +2 -0
  11. package/build/plugin.interface.js +2 -0
  12. package/build/resource.class.js +485 -0
  13. package/build/resource.interface.js +2 -0
  14. package/build/s3-client.class.js +274 -0
  15. package/build/s3db-config.interface.js +2 -0
  16. package/build/s3db.class.js +185 -0
  17. package/build/stream/resource-ids-read-stream.class.js +100 -0
  18. package/build/stream/resource-ids-transformer.class.js +40 -0
  19. package/build/stream/resource-write-stream.class.js +76 -0
  20. package/build/validator.js +37 -0
  21. package/examples/1-bulk-insert.js +64 -0
  22. package/examples/2-read-stream.js +61 -0
  23. package/examples/3-read-stream-to-csv.js +57 -0
  24. package/examples/4-read-stream-to-zip.js +56 -0
  25. package/examples/5-write-stream.js +98 -0
  26. package/examples/6-jwt-tokens.js +124 -0
  27. package/examples/concerns/index.js +64 -0
  28. package/jest.config.ts +10 -0
  29. package/package.json +51 -0
  30. package/src/cache/avro.serializer.ts +12 -0
  31. package/src/cache/json.serializer.ts +4 -0
  32. package/src/cache/s3-cache.class.ts +155 -0
  33. package/src/cache/s3-resource-cache.class.ts +75 -0
  34. package/src/cache/serializers.type.ts +8 -0
  35. package/src/errors.ts +96 -0
  36. package/src/index.ts +4 -0
  37. package/src/metadata.interface.ts +4 -0
  38. package/src/plugin.interface.ts +4 -0
  39. package/src/resource.class.ts +531 -0
  40. package/src/resource.interface.ts +21 -0
  41. package/src/s3-client.class.ts +297 -0
  42. package/src/s3db-config.interface.ts +9 -0
  43. package/src/s3db.class.ts +215 -0
  44. package/src/stream/resource-ids-read-stream.class.ts +90 -0
  45. package/src/stream/resource-ids-transformer.class.ts +38 -0
  46. package/src/stream/resource-write-stream.class.ts +78 -0
  47. package/src/validator.ts +39 -0
  48. package/tests/cache.spec.ts +187 -0
  49. package/tests/concerns/index.ts +16 -0
  50. package/tests/config.spec.ts +29 -0
  51. package/tests/resources.spec.ts +197 -0
  52. package/tsconfig.json +111 -0
"use strict";
// Compiled CommonJS output of src/validator.ts.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ValidatorFactory = exports.CustomValidator = void 0;
const crypto_js_1 = __importDefault(require("crypto-js"));
const fastest_validator_1 = __importDefault(require("fastest-validator"));
/**
 * fastest-validator subclass that carries the encryption passphrase used
 * by the custom "secret" alias registered in ValidatorFactory.
 */
class CustomValidator extends fastest_validator_1.default {
    constructor(options, passphrase) {
        super(options);
        this.passphrase = passphrase;
    }
}
exports.CustomValidator = CustomValidator;
/**
 * Builds a validator configured to strip unknown object keys and to
 * AES-encrypt any field declared with the "secret" alias.
 */
function ValidatorFactory({ passphrase }) {
    const validatorOptions = {
        useNewCustomCheckerFunction: true,
        defaults: {
            object: {
                // Unknown keys are silently removed rather than rejected.
                strict: "remove",
            },
        },
    };
    const validator = new CustomValidator(validatorOptions, passphrase);
    // "secret" behaves like "string" but stores the AES ciphertext.
    validator.alias("secret", {
        type: "string",
        custom: (value) => {
            if (!validator.passphrase)
                throw new Error("No passphrase defined.");
            const encrypted = crypto_js_1.default.AES.encrypt(String(value), validator.passphrase);
            return encrypted.toString();
        },
    });
    return validator;
}
exports.ValidatorFactory = ValidatorFactory;
const { ENV, CostsPlugin, S3db } = require("./concerns");

const { nanoid } = require("nanoid");
const Fakerator = require("fakerator");
const ProgressBar = require("progress");

// How many fake leads to insert in one bulk call.
const TOTAL = 10;

/**
 * Bulk-inserts TOTAL fake leads into the "leads" resource, rendering a
 * progress bar and the accumulated S3 request count/cost.
 */
async function main() {
  const faker = Fakerator();

  const s3db = new S3db({
    uri: ENV.CONNECTION_STRING,
    passphrase: ENV.PASSPRHASE,
    parallelism: ENV.PARALLELISM,
    plugins: [CostsPlugin],
  });

  console.log(`creating ${TOTAL} leads.`);
  console.log(`parallelism of ${ENV.PARALLELISM} requests.\n`);

  await s3db.connect();

  const progress = new ProgressBar(
    "bulk-writing :current/:total (:percent) [:bar] :rate/bps :etas (:elapseds) [:requests requests]",
    {
      width: 30,
      total: TOTAL,
      incomplete: " ",
    }
  );

  // Create the resource only on the first run.
  if (!s3db.resources.leads) {
    await s3db.createResource({
      resourceName: "leads",
      attributes: {
        name: "string",
        email: "string",
        token: "secret",
      },
    });
  }

  s3db.on("inserted", () =>
    progress.tick({ requests: s3db.client.costs.requests.total })
  );

  console.time("bulk-writing");

  const leads = Array.from({ length: TOTAL }, (_, index) => ({
    id: index,
    name: faker.names.name(),
    email: faker.internet.email(),
    token: nanoid(),
  }));

  await s3db.resource("leads").bulkInsert(leads);

  console.timeEnd("bulk-writing");
  process.stdout.write("\n\n");
  console.log("Total cost:", s3db.client.costs.total.toFixed(4), "USD");
}

main();
const { ENV, CostsPlugin, S3db } = require("./concerns");

const Multiprogress = require("multi-progress");

/**
 * Streams every "leads" record, rendering one progress bar per phase of
 * the read stream (pages listed, ids found, records read), then prints
 * the total estimated cost.
 */
async function main() {
  const s3db = new S3db({
    uri: ENV.CONNECTION_STRING,
    passphrase: ENV.PASSPRHASE,
    parallelism: ENV.PARALLELISM,
    plugins: [CostsPlugin],
  });

  await s3db.connect();
  const total = await s3db.resource("leads").count();

  console.log(`reading ${total} leads.`);
  console.log(`parallelism of ${ENV.PARALLELISM} requests.\n`);

  const multi = new Multiprogress(process.stdout);
  const baseOptions = {
    total,
    width: 30,
    incomplete: " ",
  };

  // All bars share the same template; only the label and total differ.
  const newBar = (label, overrides) =>
    multi.newBar(
      `${label} :current/:total (:percent) [:bar] :rate/bps :etas (:elapseds)`,
      { ...baseOptions, ...overrides }
    );

  const barPages = newBar("reading-pages", { total: 1 });
  const barIds = newBar("reading-ids");
  const barData = newBar("reading-data");

  const stream = s3db.resource("leads").readable();

  console.time("reading");

  stream.on("page", () => barPages.tick());
  stream.on("id", () => barIds.tick());
  stream.on("data", () => barData.tick());
  stream.on("error", (err) => console.error(err));

  stream.on("end", () => {
    process.stdout.write("\n");
    console.timeEnd("reading");
    process.stdout.write("\n\n");
    console.log("Total cost:", s3db.client.costs.total.toFixed(4), "USD");
  });
}

main();
const { ENV, S3db } = require("./concerns");

const fs = require("fs");
const ProgressBar = require("progress");
const { Transform } = require("stream");

/**
 * Streams every "leads" record and writes it out as a semicolon-separated
 * CSV file under examples/tmp/.
 */
async function main() {
  const s3db = new S3db({
    uri: ENV.CONNECTION_STRING,
    passphrase: ENV.PASSPRHASE,
    parallelism: ENV.PARALLELISM,
  });

  await s3db.connect();
  const total = await s3db.resource("leads").count();

  console.log(`reading ${total} leads.`);
  console.log(`parallelism of ${ENV.PARALLELISM} requests.\n`);

  const barData = new ProgressBar(
    "reading-data :current/:total (:percent) [:bar] :rate/bps :etas (:elapseds)",
    {
      total,
      width: 30,
      incomplete: " ",
    }
  );

  // createWriteStream does not create missing parent directories, so make
  // sure the output directory exists before opening the file.
  const outputDir = __dirname + "/tmp";
  fs.mkdirSync(outputDir, { recursive: true });
  const filename = outputDir + "/leads." + Date.now() + ".csv";

  const stream = await s3db.resource("leads").readable();
  const streamWrite = fs.createWriteStream(filename);

  // Turns each lead object into one CSV line.
  const transformer = new Transform({
    objectMode: true,
    transform(chunk, encoding, callback) {
      this.push(
        [chunk.id, chunk.email, chunk.name, chunk.token].join(";") + "\n"
      );
      callback();
    },
  });

  console.time("reading-data");
  stream.on("data", () => barData.tick());
  // Surface read errors instead of crashing on an unhandled "error" event.
  stream.on("error", (err) => console.error(err));

  stream.on("end", () => {
    console.timeEnd("reading-data");
    process.stdout.write("\n");
    const { size } = fs.statSync(filename);
    console.log(`\nTotal size: ${(size / (1024 * 1000)).toFixed(2)} Mb`);
  });

  stream.pipe(transformer).pipe(streamWrite);
}

main();
const { ENV, S3db } = require("./concerns");

const fs = require("fs");
const zlib = require("node:zlib");
const ProgressBar = require("progress");
const { Transform } = require("node:stream");
const { pipeline } = require("node:stream/promises");

/**
 * Streams every "leads" record into a gzip-compressed CSV file under
 * examples/tmp/.
 */
async function main() {
  const s3db = new S3db({
    uri: ENV.CONNECTION_STRING,
    passphrase: ENV.PASSPRHASE,
    parallelism: ENV.PARALLELISM,
  });

  await s3db.connect();
  const total = await s3db.resource("leads").count();

  console.log(`reading ${total} leads.`);
  console.log(`parallelism of ${ENV.PARALLELISM} requests.\n`);

  const barData = new ProgressBar(
    "reading-data :current/:total (:percent) [:bar] :rate/bps :etas (:elapseds)",
    {
      total,
      width: 30,
      incomplete: " ",
    }
  );

  // createWriteStream does not create missing parent directories.
  const outputDir = __dirname + "/tmp";
  fs.mkdirSync(outputDir, { recursive: true });
  const filename = outputDir + "/leads." + Date.now() + ".csv.gzip";

  const stream = await s3db.resource("leads").readable();
  const streamWrite = fs.createWriteStream(filename);

  // Turns each lead object into one CSV line.
  const transformer = new Transform({
    objectMode: true,
    transform(chunk, encoding, callback) {
      this.push([chunk.id, chunk.name, chunk.token].join(";") + "\n");
      callback();
    },
  });

  console.time("reading-data");

  stream.on("data", () => barData.tick());
  stream.on("end", () => {
    console.timeEnd("reading-data");
    process.stdout.write("\n");
    const { size } = fs.statSync(filename);
    console.log(`\nTotal zip size: ${(size / (1024 * 1000)).toFixed(2)} Mb`);
  });

  // pipeline() from node:stream/promises returns a promise; the original
  // code dropped it, turning any failure into an unhandled rejection.
  await pipeline(stream, transformer, zlib.createGzip(), streamWrite);
}

main().catch((err) => console.error(err));
const { ENV, S3db, CostsPlugin } = require("./concerns");

const Multiprogress = require("multi-progress");
const { pipeline } = require("stream");

/**
 * Copies every record from the "leads" resource into "copy-leads" by
 * piping a resource read stream into a resource write stream, rendering
 * one progress bar per phase plus a raw request counter.
 */
async function main() {
  const s3db = new S3db({
    uri: ENV.CONNECTION_STRING,
    passphrase: ENV.PASSPRHASE,
    parallelism: ENV.PARALLELISM,
    plugins: [CostsPlugin],
  });

  await s3db.connect();

  // Create the destination resource only on the first run.
  if (!s3db.resources.copyLeads) {
    await s3db.createResource({
      resourceName: "copy-leads",
      attributes: {
        name: "string",
        email: "string",
        token: "secret",
      },
    });
  }

  const total = await s3db.resource("leads").count();

  console.log(`reading ${total} leads.`);
  console.log(`parallelism of ${ENV.PARALLELISM} requests.\n`);

  const multi = new Multiprogress(process.stdout);
  const options = {
    total,
    width: 30,
    incomplete: " ",
  };

  const requestsBar = multi.newBar(
    "requests :current/:total (:percent) [:bar] :rate/bps :etas (:elapseds)",
    {
      ...options,
      total: 1,
    }
  );

  const readPages = multi.newBar(
    "reading-pages :current/:total (:percent) [:bar] :rate/bps :etas (:elapseds)",
    {
      ...options,
      total: 1,
    }
  );

  const readIds = multi.newBar(
    "reading-ids :current/:total (:percent) [:bar] :rate/bps :etas (:elapseds)",
    options
  );

  const readData = multi.newBar(
    "reading-data :current/:total (:percent) [:bar] :rate/bps :etas (:elapseds)",
    options
  );

  const writeIds = multi.newBar(
    "writing-ids :current/:total (:percent) [:bar] :rate/bps :etas (:elapseds)",
    options
  );

  const writeData = multi.newBar(
    "writing-data :current/:total (:percent) [:bar] :rate/bps :etas (:elapseds)",
    options
  );

  const readStream = s3db.resource("leads").readable();
  const writeStream = s3db.resource("copy-leads").writable();

  console.time("copying-data");
  s3db.client.on("request", () => requestsBar.tick());

  readStream.on("page", () => readPages.tick());
  readStream.on("id", () => readIds.tick());
  readStream.on("data", () => readData.tick());

  writeStream.on("id", () => writeIds.tick());
  writeStream.on("data", () => writeData.tick());

  // NOTE(review): "end" is a readable-stream event; if writeStream is a
  // plain Writable this summary likely belongs on "finish" — confirm
  // against resource-write-stream.class before changing.
  writeStream.on("end", () => {
    process.stdout.write("\n");
    console.timeEnd("copying-data");
    process.stdout.write("\n\n");
    console.log("Total cost:", s3db.client.costs.total.toFixed(4), "USD");
  });

  // The callback-style pipeline invokes its callback on success too, with
  // err === undefined; the original logged it unconditionally and printed
  // "undefined" after every successful copy.
  pipeline(readStream, writeStream, (err) => {
    if (err) console.error(err);
  });
}

main();
const { ENV, S3db } = require("./concerns");

const jwt = require("jsonwebtoken");
const { nanoid } = require("nanoid");
const Fakerator = require("fakerator");
const sha256 = require("crypto-js/sha256");
const { take, shuffle } = require("lodash");

const fake = Fakerator();

// Builds one fake user with a random subset of the known scopes.
const userFactory = () => {
  const scopes = ["admin", "guest", "users:read", "tokens:read"];
  const email = fake.internet.email();

  return {
    id: email,
    email,
    name: fake.names.name(),
    password: nanoid(),
    scopes: take(shuffle(scopes), fake.random.number(scopes.length)),
  };
};

const Token = {
  /**
   * Authenticates the user by password and issues a signed JWT, persisting
   * its decoded claims under the token's sha256 hash.
   */
  createToken: async (s3db, email, password) => {
    const user = await s3db.resource("users").getById(email);

    if (user.password !== password) {
      console.log({ user, email, password });
      throw new Error("invalid-user");
    }

    const data = {
      name: user.name,
      email: user.email,
      scopes: user.scopes,
      email_verified: true,
    };

    const token = jwt.sign(data, ENV.PASSPRHASE, {
      expiresIn: "2s",
      subject: "test",
      issuer: "s3db.js",
      audience: "default",
    });

    // jwt.decode's second argument is an options object, not a secret;
    // decoding our own freshly signed token needs no verification here.
    const decoded = jwt.decode(token);

    await s3db.resource("tokens").insert({
      id: sha256(token).toString(),
      ...decoded,
    });

    return token;
  },

  /**
   * Verifies a token's signature and expiry, then loads its stored claims.
   * Returns [error] on failure or [null, { decoded, savedToken }].
   */
  validateToken: async (client, token) => {
    const tokenId = sha256(token).toString();

    try {
      // BUG FIX: the original used jwt.decode, which performs NO signature
      // or expiry validation (its 2nd arg is options, not a key), so any
      // forged token was accepted. jwt.verify actually validates. Note the
      // tokens issued above expire after 2s.
      const decoded = jwt.verify(token, ENV.PASSPRHASE);
      const savedToken = await client.resource("tokens").getById(tokenId);

      return [null, { decoded, savedToken }];
    } catch (error) {
      return [error];
    }
  },
};

/**
 * Demo flow: create users + tokens resources, issue one JWT per user,
 * then validate each issued token.
 */
async function main() {
  const s3db = new S3db({
    uri: ENV.CONNECTION_STRING + Date.now(),
    passphrase: ENV.PASSPRHASE,
    parallelism: ENV.PARALLELISM,
  });

  await s3db.connect();

  await s3db.createResource({
    resourceName: "users",
    attributes: {
      name: "string",
      email: "string",
      password: "secret",
      scopes: "array|items:string",
    },
  });

  await s3db.createResource({
    resourceName: "tokens",
    attributes: {
      iss: ["string", "url"],
      sub: "string",
      aud: "string",
      exp: "number",
      email: "email",
      name: "string",
      email_verified: "boolean",
      scopes: "array|items:string",
    },
  });

  const users = new Array(5).fill(0).map(userFactory);
  await s3db.resource("users").bulkInsert(users);

  let tokens = [];
  process.stdout.write("Created tokens: ");
  for (const user of users) {
    const token = await Token.createToken(s3db, user.email, user.password);
    tokens.push(token);
    process.stdout.write(".");
  }

  process.stdout.write("\nValidated tokens: ");
  for (const token of tokens) {
    const [error] = await Token.validateToken(s3db, token);
    if (!error) {
      process.stdout.write(".");
    }
  }
}

main();
@@ -0,0 +1,64 @@
1
+ require("dotenv").config({ path: `${process.cwd()}/../.env` });
2
+
3
+ const { bucket, accessKeyId, secretAccessKey } = process.env;
4
+
5
+ module.exports = {
6
+ ENV: {
7
+ PARALLELISM: 250,
8
+ PASSPRHASE: 'super-secret-leaked-fluffy-passphrase',
9
+ CONNECTION_STRING:
10
+ `s3://${accessKeyId}:${secretAccessKey}@${bucket}/databases/examples-` +
11
+ new Date().toISOString().substring(0, 10),
12
+ },
13
+
14
+ S3db: require("../../build").S3db,
15
+
16
+ CostsPlugin: {
17
+ async setup (s3db) {
18
+ this.client = s3db.client
19
+
20
+ this.client.costs = {
21
+ total: 0,
22
+
23
+ prices: {
24
+ put: 0.000005,
25
+ post: 0.000005,
26
+ copy: 0.000005,
27
+ list: 0.000005,
28
+ get: 0.0000004,
29
+ select: 0.0000004,
30
+ delete: 0.0000004,
31
+ },
32
+
33
+ requests: {
34
+ total: 0,
35
+ put: 0,
36
+ post: 0,
37
+ copy: 0,
38
+ list: 0,
39
+ get: 0,
40
+ select: 0,
41
+ delete: 0,
42
+ },
43
+ };
44
+ },
45
+
46
+ start () {
47
+ const addRequest = (req) => {
48
+ this.client.costs.requests[req]++;
49
+ this.client.costs.total += this.client.costs.prices[req];
50
+ };
51
+
52
+ this.client.on("request", (name) => {
53
+ this.client.costs.requests.total++;
54
+
55
+ if (name === "getObject") addRequest("get");
56
+ else if (name === "putObject") addRequest("put");
57
+ else if (name === "headObject") addRequest("get");
58
+ else if (name === "deleteObject") addRequest("delete");
59
+ else if (name === "deleteObjects") addRequest("delete");
60
+ else if (name === "listObjectsV2") addRequest("list");
61
+ });
62
+ }
63
+ },
64
+ };
package/jest.config.ts ADDED
@@ -0,0 +1,10 @@
1
+ import type { Config } from "@jest/types";
2
+
3
+ const config: Config.InitialOptions = {
4
+ verbose: true,
5
+ transform: {
6
+ "^.+\\.tsx?$": "ts-jest",
7
+ },
8
+ };
9
+
10
+ export default config;
package/package.json ADDED
@@ -0,0 +1,51 @@
1
+ {
2
+ "name": "s3db.js",
3
+ "version": "1.0.0",
4
+ "description": "Use AWS S3 as a database",
5
+ "main": "build/index.js",
6
+ "scripts": {
7
+ "build": "rimraf ./build && tsc",
8
+ "test": "jest --coverage",
9
+ "test:cache": "jest --coverage ./tests/cache.spec.ts",
10
+ "example:1": "cd examples; node 1-bulk-insert.js",
11
+ "example:2": "cd examples; node 2-read-stream.js",
12
+ "example:3": "cd examples; node 3-read-stream-to-csv.js",
13
+ "example:4": "cd examples; node 4-read-stream-to-zip.js",
14
+ "example:5": "cd examples; node 5-write-stream.js",
15
+ "example:6": "cd examples; node 6-jwt-tokens.js",
16
+ "coverage": "npx http-server ./coverage/lcov-report"
17
+ },
18
+ "author": "filipeforattini",
19
+ "license": "ISC",
20
+ "devDependencies": {
21
+ "@types/crypto-js": "^4.1.1",
22
+ "@types/flat": "^5.0.2",
23
+ "@types/jest": "^29.2.3",
24
+ "@types/lodash": "^4.14.190",
25
+ "@types/node": "^18.11.9",
26
+ "@types/pako": "^2.0.0",
27
+ "@types/progress": "^2.0.5",
28
+ "dotenv": "^16.0.3",
29
+ "esm": "^3.2.25",
30
+ "fakerator": "^0.3.6",
31
+ "jest": "^29.3.1",
32
+ "jsonwebtoken": "^8.5.1",
33
+ "multi-progress": "^4.0.0",
34
+ "progress": "^2.0.3",
35
+ "rimraf": "^3.0.2",
36
+ "ts-jest": "^29.0.3",
37
+ "ts-node": "^10.9.1",
38
+ "typescript": "^4.9.3"
39
+ },
40
+ "dependencies": {
41
+ "@supercharge/promise-pool": "^2.3.2",
42
+ "avsc": "^5.7.7",
43
+ "aws-sdk": "^2.1261.0",
44
+ "crypto-js": "^4.1.1",
45
+ "fastest-validator": "^1.15.0",
46
+ "flat": "^5.0.2",
47
+ "lodash": "^4.17.21",
48
+ "nanoid": "3.3.4",
49
+ "ts-mixer": "^6.0.2"
50
+ }
51
+ }
@@ -0,0 +1,12 @@
1
+ import avro from "avsc";
2
+
3
+ export const CacheAvroSchema = avro.Type.forSchema({
4
+ name: "Cache",
5
+ type: "record",
6
+ fields: [{ name: "data", type: ["string"] }],
7
+ });
8
+
9
+ export const AvroSerializer = {
10
+ serialize: (data: any) => String(CacheAvroSchema.toBuffer(data)),
11
+ unserialize: (data: any) => CacheAvroSchema.fromBuffer(Buffer.from(data)),
12
+ }
@@ -0,0 +1,4 @@
1
+ export const JsonSerializer = {
2
+ serialize: (data: any) => JSON.stringify(data),
3
+ unserialize: (data: any) => JSON.parse(data),
4
+ }