@datapos/datapos-development 0.3.89 → 0.3.91
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/datapos-development.es.js +38 -38
- package/package.json +1 -1
@@ -1,13 +1,13 @@
 import { exec as m } from "child_process";
-import { promises as
+import { promises as e } from "fs";
 import { nanoid as w } from "nanoid";
 import { promisify as y } from "util";
 const h = ["createObject", "dropObject", "removeRecords", "upsertRecords"], v = ["findObject", "getRecord", "listNodes", "previewObject", "retrieveRecords"], g = y(m);
 async function S() {
 try {
 console.info("đ Building configuration...");
-const o = JSON.parse(await
-o.name && (
+const o = JSON.parse(await e.readFile("package.json", "utf8")), n = JSON.parse(await e.readFile("config.json", "utf8"));
+o.name && (n.id = o.name.replace("@datapos/", "")), o.version && (n.version = o.version), await e.writeFile("config.json", JSON.stringify(n, void 0, 4), "utf8"), console.info("✅ Configuration built.");
 } catch (o) {
 console.error("❌ Error building configuration.", o);
 }
@@ -15,17 +15,17 @@ async function S() {
 async function J(o) {
 try {
 console.info(`đ Building public directory index for identifier '${o}'...`);
-const
+const n = {};
 async function i(r, t) {
 console.info(`âī¸ Processing directory '${r}'...`);
 const f = [], a = r.substring(`public/${o}`.length);
-
+n[a] = f;
 for (const c of t) {
 const l = `${r}/${c}`;
 try {
-const d = await
+const d = await e.stat(l);
 if (d.isDirectory()) {
-const p = await
+const p = await e.readdir(l), u = { childCount: p.length, name: `${c}`, typeId: "folder" };
 f.push(u), await i(l, p);
 } else {
 const p = { id: w(), lastModifiedAt: d.mtimeMs, name: c, size: d.size, typeId: "object" };
@@ -40,16 +40,16 @@ async function J(o) {
 return d !== 0 ? d : c.name.localeCompare(l.name);
 });
 }
-const s = await
-await i(`public/${o}`, s), await
-} catch (
-console.error("❌ Error building public directory index.",
+const s = await e.readdir(`public/${o}`);
+await i(`public/${o}`, s), await e.writeFile(`./public/${o}Index.json`, JSON.stringify(n), "utf8"), console.info("✅ Public directory index built.");
+} catch (n) {
+console.error("❌ Error building public directory index.", n);
 }
 }
 async function j() {
 try {
 console.info("đ Building connector configuration...");
-const o = JSON.parse(await
+const o = JSON.parse(await e.readFile("package.json", "utf8")), n = JSON.parse(await e.readFile("config.json", "utf8")), i = await e.readFile("src/index.ts", "utf8");
 let s = !1, r = !1;
 const t = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, f = [...i.matchAll(t)].filter((c) => !c[1] && c[2] !== "constructor").map((c) => {
 const l = c[2];
@@ -57,7 +57,7 @@ async function j() {
 });
 f.length > 0 ? console.info(`ℹ️ Implements ${f.length} operations.`) : console.warn("⚠️ Implements no operations.");
 const a = r && s ? "bidirectional" : r ? "source" : s ? "destination" : null;
-a ? console.info(`ℹ️ Supports ${a} usage.`) : console.warn("⚠️ No usage identified."), o.name && (
+a ? console.info(`ℹ️ Supports ${a} usage.`) : console.warn("⚠️ No usage identified."), o.name && (n.id = o.name), n.operations = f, n.usageId = a, o.version && (n.version = o.version), await e.writeFile("config.json", JSON.stringify(n, void 0, 4), "utf8"), console.info("✅ Connector configuration built.");
 } catch (o) {
 console.error("❌ Error building connector configuration.", o);
 }
@@ -65,8 +65,8 @@ async function j() {
 async function x() {
 try {
 console.info("đ Building context configuration...");
-const o = JSON.parse(await
-o.name && (
+const o = JSON.parse(await e.readFile("package.json", "utf8")), n = JSON.parse(await e.readFile("config.json", "utf8")), i = await e.readFile("src/index.ts", "utf8"), s = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, r = [...i.matchAll(s)].filter((t) => !t[1] && t[2] !== "constructor").map((t) => t[2]);
+o.name && (n.id = o.name), n.operations = r, o.version && (n.version = o.version), await e.writeFile("config.json", JSON.stringify(n, void 0, 4), "utf8"), console.info("✅ Context configuration built.");
 } catch (o) {
 console.error("❌ Error building context configuration.", o);
 }
@@ -74,8 +74,8 @@ async function x() {
 async function F() {
 try {
 console.info("đ Building informer configuration...");
-const o = JSON.parse(await
-o.name && (
+const o = JSON.parse(await e.readFile("package.json", "utf8")), n = JSON.parse(await e.readFile("config.json", "utf8")), i = await e.readFile("src/index.ts", "utf8"), s = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, r = [...i.matchAll(s)].filter((t) => !t[1] && t[2] !== "constructor").map((t) => t[2]);
+o.name && (n.id = o.name), n.operations = r, o.version && (n.version = o.version), await e.writeFile("config.json", JSON.stringify(n, void 0, 4), "utf8"), console.info("✅ Informer configuration built.");
 } catch (o) {
 console.error("❌ Error building informer configuration.", o);
 }
@@ -83,8 +83,8 @@ async function F() {
 async function C() {
 try {
 console.info("đ Building presenter configuration...");
-const o = JSON.parse(await
-o.name && (
+const o = JSON.parse(await e.readFile("package.json", "utf8")), n = JSON.parse(await e.readFile("config.json", "utf8")), i = await e.readFile("src/index.ts", "utf8"), s = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, r = [...i.matchAll(s)].filter((t) => !t[1] && t[2] !== "constructor").map((t) => t[2]);
+o.name && (n.id = o.name), n.operations = r, o.version && (n.version = o.version), await e.writeFile("config.json", JSON.stringify(n, void 0, 4), "utf8"), console.info("✅ Presenter configuration built.");
 } catch (o) {
 console.error("❌ Error building context configuration.", o);
 }
@@ -92,12 +92,12 @@ async function C() {
 async function k() {
 try {
 console.info("đ Bumping version...");
-const o = JSON.parse(await
+const o = JSON.parse(await e.readFile("package.json", "utf8"));
 if (o.version) {
-const
-o.version = `${i[0]}.${i[1]}.${Number(i[2]) + 1}`, await
+const n = o.version, i = o.version.split(".");
+o.version = `${i[0]}.${i[1]}.${Number(i[2]) + 1}`, await e.writeFile("package.json", JSON.stringify(o, void 0, 4), "utf8"), console.info(`✅ Version bumped from ${n} to ${o.version}.`);
 } else
-o.version = "0.0.001", await
+o.version = "0.0.001", await e.writeFile("package.json", JSON.stringify(o, void 0, 4), "utf8"), console.warn(`⚠️ Version initialised to ${o.version}.`);
 } catch (o) {
 console.error("❌ Error bumping package version.", o);
 }
@@ -108,11 +108,11 @@ function E(o) {
 async function I() {
 try {
 console.info("đ Sending deployment notice...");
-const o = JSON.parse(await
+const o = JSON.parse(await e.readFile("config.json", "utf8")), n = {
 body: JSON.stringify(o),
 headers: { "Content-Type": "application/json" },
 method: "PUT"
-}, i = await fetch(`https://api.datapos.app/states/${o.id}`,
+}, i = await fetch(`https://api.datapos.app/states/${o.id}`, n);
 if (!i.ok) throw new Error(await i.text());
 console.info("✅ Deployment notice sent.");
 } catch (o) {
@@ -122,20 +122,20 @@ async function I() {
 async function R() {
 try {
 console.info("đ Synchronising with GitHub....");
-const o = JSON.parse(await
+const o = JSON.parse(await e.readFile("package.json", "utf8"));
 await g("git add ."), await g(`git commit -m "v${o.version}"`), await g("git push origin main:main"), console.info(`✅ Synchronised version ${o.version} with GitHub.`);
 } catch (o) {
 console.error("❌ Error synchronising with GitHub.", o);
 }
 }
-async function P(o,
+async function P(o, n) {
 try {
 console.info("đ Uploading directory to R2....");
 async function i(r, t, f) {
 for (const a of f) {
 const c = `${r}/${a}`, l = `${t}/${a}`;
-if ((await
-const p = await
+if ((await e.stat(c)).isDirectory()) {
+const p = await e.readdir(c);
 await i(c, l, p);
 } else {
 console.info(`âī¸ Uploading '${r}/${a}'...`);
@@ -144,8 +144,8 @@ async function P(o, e) {
 }
 }
 }
-const s = await
-await i(`${o}/${
+const s = await e.readdir(`${o}/${n}/`);
+await i(`${o}/${n}`, n, s), console.info("✅ Directory uploaded to R2.");
 } catch (i) {
 console.error("❌ Error uploading directory to R2.", i);
 }
@@ -153,11 +153,11 @@ async function P(o, e) {
 async function A() {
 try {
 console.info("đ Uploading module configuration....");
-const o = JSON.parse(await
+const o = JSON.parse(await e.readFile("config.json", "utf8")), n = o.id, i = {
 body: JSON.stringify(o),
 headers: { "Content-Type": "application/json" },
 method: "PUT"
-}, s = await fetch(`https://api.datapos.app/states/${
+}, s = await fetch(`https://api.datapos.app/states/${n}`, i);
 if (!s.ok) throw new Error(await s.text());
 console.info("✅ Module configuration uploaded.");
 } catch (o) {
@@ -167,13 +167,13 @@ async function A() {
 async function T(o) {
 try {
 console.info("đ Uploading module to R2...");
-const i = `v${JSON.parse(await
+const i = `v${JSON.parse(await e.readFile("package.json", "utf8")).version}`;
 async function s(r, t = "") {
-const f = await
+const f = await e.readdir(r, { withFileTypes: !0 });
 for (const a of f) {
 const c = `${r}/${a.name}`, l = t ? `${t}/${a.name}` : a.name;
 if (!a.isDirectory()) {
-const d = `${o}
+const d = `${o}_${i}/${l}`.replace(/\\/g, "/"), p = a.name.endsWith(".js") ? "application/javascript" : a.name.endsWith(".css") ? "text/css" : "application/octet-stream";
 console.info(`âī¸ Uploading '${l}' → '${d}'...`);
 const { stderr: u } = await g(`wrangler r2 object put "${d}" --file="${c}" --content-type ${p} --jurisdiction=eu --remote`);
 if (u) throw new Error(u);
@@ -181,8 +181,8 @@ async function T(o) {
 }
 }
 await s("dist"), console.info("✅ Module uploaded to R2.");
-} catch (
-console.error("❌ Error uploading module to R2.",
+} catch (n) {
+console.error("❌ Error uploading module to R2.", n);
 }
 }
 export {
package/package.json CHANGED