@datapos/datapos-development 0.3.60 → 0.3.62
This diff shows the changes between publicly released package versions as they appear in their respective public registries; it is provided for informational purposes only.

@@ -8,177 +8,176 @@ let b = (o = 21) => {
     e += y[i[o] & 63];
   return e;
 };
-const
+const h = ["createObject", "dropObject", "removeRecords", "upsertRecords"], v = ["findObject", "getRecord", "listNodes", "previewObject", "retrieveRecords"], p = w(m);
 async function $() {
   try {
-    console.
+    console.info("đ Building configuration...");
     const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8"));
-    o.name && (e.id = o.name), o.version && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.
+    o.name && (e.id = o.name), o.version && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.info("✅ Configuration built.");
   } catch (o) {
-    console.
+    console.error("❌ Error building configuration.", o);
   }
 }
 async function J(o) {
   try {
-    console.
+    console.info(`đ Building public directory index for identifier '${o}'...`);
     const e = {};
-    async function i(
-      console.
-      const
-      e[
-      for (const a of
-        const
+    async function i(t, r) {
+      console.info(`âī¸ Processing directory '${t}'...`);
+      const f = [], l = t.substring(`public/${o}`.length);
+      e[l] = f;
+      for (const a of r) {
+        const c = `${t}/${a}`;
         try {
-          const d = await n.stat(
+          const d = await n.stat(c);
           if (d.isDirectory()) {
-            const
-
+            const u = await n.readdir(c), g = { childCount: u.length, name: `${a}`, typeId: "folder" };
+            f.push(g), await i(c, u);
           } else {
-            const
-
+            const u = { id: b(), lastModifiedAt: d.mtimeMs, name: a, size: d.size, typeId: "object" };
+            f.push(u);
           }
         } catch (d) {
-
+          throw new Error(`Unable to get information for '${a}' in 'buildPublicDirectoryIndex'. ${String(d)}`);
         }
       }
-
-      const d = a.typeId.localeCompare(
-      return d !== 0 ? d : a.name.localeCompare(
+      f.sort((a, c) => {
+        const d = a.typeId.localeCompare(c.typeId);
+        return d !== 0 ? d : a.name.localeCompare(c.name);
       });
     }
     const s = await n.readdir(`public/${o}`);
-    await i(`public/${o}`, s), await n.writeFile(`./public/${o}Index.json`, JSON.stringify(e), "utf8"), console.
+    await i(`public/${o}`, s), await n.writeFile(`./public/${o}Index.json`, JSON.stringify(e), "utf8"), console.info("✅ Public directory index built.");
   } catch (e) {
-    console.
+    console.error("❌ Error building public directory index.", e);
   }
 }
 async function j() {
   try {
-    console.
+    console.info("đ Building connector configuration...");
     const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8");
-    let s = !1,
-    const
-      const
-      return s = s ||
+    let s = !1, t = !1;
+    const r = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, f = [...i.matchAll(r)].filter((a) => !a[1] && a[2] !== "constructor").map((a) => {
+      const c = a[2];
+      return s = s || h.includes(c), t = t || v.includes(c), c;
    });
-
-    const
-    console.
+    f.length > 0 ? console.info(`ℹ️ Implements ${f.length} operations.`) : console.warn("⚠️ Implements no operations.");
+    const l = t && s ? "bidirectional" : t ? "source" : s ? "destination" : null;
+    l ? console.info(`ℹ️ Supports ${l} usage.`) : console.warn("⚠️ No usage identified."), o.name && (e.id = o.name), e.operations = f, e.usageId = l, o.version && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.info("✅ Connector configuration built.");
   } catch (o) {
-    console.
+    console.error("❌ Error building connector configuration.", o);
   }
 }
 async function x() {
   try {
-    console.
-    const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8"), s = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm,
-    o.name && (e.id = o.name), e.operations =
+    console.info("đ Building context configuration...");
+    const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8"), s = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, t = [...i.matchAll(s)].filter((r) => !r[1] && r[2] !== "constructor").map((r) => r[2]);
+    o.name && (e.id = o.name), e.operations = t, o.version && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.info("✅ Context configuration built.");
   } catch (o) {
-    console.
+    console.error("❌ Error building context configuration.", o);
   }
 }
 async function F() {
   try {
-    console.
-    const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8"), s = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm,
-    o.name && (e.id = o.name), e.operations =
+    console.info("đ Building informer configuration...");
+    const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8"), s = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, t = [...i.matchAll(s)].filter((r) => !r[1] && r[2] !== "constructor").map((r) => r[2]);
+    o.name && (e.id = o.name), e.operations = t, o.version && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.info("✅ Informer configuration built.");
   } catch (o) {
-    console.
+    console.error("❌ Error building informer configuration.", o);
   }
 }
 async function C() {
   try {
-    console.
-    const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8"), s = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm,
-    o.name && (e.id = o.name), e.operations =
+    console.info("đ Building presenter configuration...");
+    const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8"), s = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, t = [...i.matchAll(s)].filter((r) => !r[1] && r[2] !== "constructor").map((r) => r[2]);
+    o.name && (e.id = o.name), e.operations = t, o.version && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.info("✅ Presenter configuration built.");
   } catch (o) {
-    console.
+    console.error("❌ Error building context configuration.", o);
   }
 }
-async function
+async function E() {
   try {
-    console.
+    console.info("đ Bumping version...");
     const o = JSON.parse(await n.readFile("package.json", "utf8"));
     if (o.version) {
       const e = o.version, i = o.version.split(".");
-      o.version = `${i[0]}.${i[1]}.${Number(i[2]) + 1}`, await n.writeFile("package.json", JSON.stringify(o, void 0, 4), "utf8"), console.
+      o.version = `${i[0]}.${i[1]}.${Number(i[2]) + 1}`, await n.writeFile("package.json", JSON.stringify(o, void 0, 4), "utf8"), console.info(`✅ Version bumped from ${e} to ${o.version}.`);
     } else
-      o.version = "0.0.001", await n.writeFile("package.json", JSON.stringify(o, void 0, 4), "utf8"), console.
+      o.version = "0.0.001", await n.writeFile("package.json", JSON.stringify(o, void 0, 4), "utf8"), console.warn(`⚠️ Version initialised to ${o.version}.`);
   } catch (o) {
-    console.
+    console.error("❌ Error bumping package version.", o);
   }
 }
-
+function k(o) {
+  console.error(`❌ ${o}`);
+}
+async function I() {
   try {
-    console.
+    console.info("đ Sending deployment notice...");
     const o = JSON.parse(await n.readFile("config.json", "utf8")), e = {
       body: JSON.stringify(o),
       headers: { "Content-Type": "application/json" },
       method: "PUT"
     }, i = await fetch(`https://api.datapos.app/states/${o.id}`, e);
-    if (!i.ok)
-
-    console.log("✅ Deployment notice sent.");
+    if (!i.ok) throw new Error(await i.text());
+    console.info("✅ Deployment notice sent.");
   } catch (o) {
-    console.
+    console.error("❌ Error sending deployment notice.", o);
   }
 }
-async function
+async function R() {
   try {
-    console.
+    console.info("đ Synchronising with GitHub....");
     const o = JSON.parse(await n.readFile("package.json", "utf8"));
-    await
+    await p("git add ."), await p(`git commit -m "v${o.version}"`), await p("git push origin main:main"), console.info(`✅ Synchronised version ${o.version} with GitHub.`);
   } catch (o) {
-    console.
+    console.error("❌ Error synchronising with GitHub.", o);
   }
 }
-async function
+async function A(o, e) {
   try {
-    console.
-    async function i(
-      for (const
-        const a = `${
-
-
-
-
-
-
-
-        }
-      } catch (d) {
-        console.error(`Unable to get information for '${c}' in 'uploadDirectoryToR2'.`, d);
+    console.info("đ Uploading directory to R2....");
+    async function i(t, r, f) {
+      for (const l of f) {
+        const a = `${t}/${l}`, c = `${r}/${l}`;
+        if ((await n.stat(a)).isDirectory()) {
+          const u = await n.readdir(a);
+          await i(a, c, u);
+        } else {
+          console.info(`âī¸ Uploading '${t}/${l}'.`);
+          const u = `wrangler r2 object put "datapos-sample-data-eu/${r}/${l}" --file="${t}/${l}" --jurisdiction=eu --remote`, g = await p(u);
+          if (g.stderr) throw new Error(g.stderr);
        }
      }
    }
     const s = await n.readdir(`${o}/${e}/`);
-    await i(`${o}/${e}`, e, s), console.
+    await i(`${o}/${e}`, e, s), console.info("✅ Directory uploaded to R2.");
   } catch (i) {
-    console.
+    console.error("❌ Error uploading directory to R2.", i);
   }
 }
-async function
+async function P() {
   try {
-    console.
+    console.info("đ Uploading module configuration....");
     const o = JSON.parse(await n.readFile("config.json", "utf8")), e = o.id, i = {
       body: JSON.stringify(o),
       headers: { "Content-Type": "application/json" },
       method: "PUT"
     }, s = await fetch(`https://api.datapos.app/states/${e}`, i);
-    s.ok
+    if (!s.ok) throw new Error(await s.text());
+    console.info("✅ Module configuration uploaded.");
   } catch (o) {
-    console.
+    console.error("❌ Error uploading module configuration.", o);
   }
 }
-async function
+async function U(o, e) {
   try {
-    console.
-    const i = JSON.parse(await n.readFile("package.json", "utf8")), s = e.replace(/^(.*?\.)/, `$1v${i.version}.`), {
-
-    );
-    r && console.log(r), t && console.error(t), console.log("✅ Directory cleared.");
+    console.info("đ Uploading module to R2....");
+    const i = JSON.parse(await n.readFile("package.json", "utf8")), s = e.replace(/^(.*?\.)/, `$1v${i.version}.`), { stderr: t } = await p(`wrangler r2 object put ${s} --file=dist/${o} --content-type application/javascript --jurisdiction=eu --remote`);
+    if (t) throw new Error(t);
+    console.info("✅ Module uploaded to R2.");
   } catch (i) {
-    console.
+    console.error("❌ Error uploading module to R2.", i);
   }
 }
 export {
|
   F as buildInformerConfig,
   C as buildPresenterConfig,
   J as buildPublicDirectoryIndex,
-
-
-  I as
-  R as
-
-
+  E as bumpVersion,
+  k as echoError,
+  I as sendDeploymentNotice,
+  R as syncWithGitHub,
+  A as uploadDirectoryToR2,
+  P as uploadModuleConfig,
+  U as uploadModuleToR2
 };
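The new `buildConnectorConfig` (`j`) derives the connector's operations and usage from `src/index.ts`: it collects non-private, non-constructor method names with the 4-space-indent regex shown in the diff, then checks them against the destination list `h` and the source list `v` to choose a usage identifier. A rough TypeScript sketch of that classification, with illustrative names:

// Sketch of the operation scan added in buildConnectorConfig (j). Method names are pulled
// from 'src/index.ts' with the same regex as the dist code, then classified against the
// destination (h) and source (v) operation lists to derive a usage identifier.
import { readFile } from "node:fs/promises";

const destinationOps = ["createObject", "dropObject", "removeRecords", "upsertRecords"];
const sourceOps = ["findObject", "getRecord", "listNodes", "previewObject", "retrieveRecords"];

async function scanOperations(): Promise<{ operations: string[]; usageId: string | null }> {
    const source = await readFile("src/index.ts", "utf8");
    // Matches 4-space-indented method declarations; group 1 captures 'private ', group 2 the name.
    const methodPattern = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm;
    const operations = [...source.matchAll(methodPattern)]
        .filter((match) => !match[1] && match[2] !== "constructor")
        .map((match) => match[2]);
    const isDestination = operations.some((name) => destinationOps.includes(name));
    const isSource = operations.some((name) => sourceOps.includes(name));
    const usageId = isSource && isDestination ? "bidirectional" : isSource ? "source" : isDestination ? "destination" : null;
    return { operations, usageId };
}

The context, informer and presenter builders (`x`, `F`, `C`) run the same method scan but record only the `operations` list, without deriving a `usageId`.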
@@ -8,9 +8,10 @@ declare function buildContextConfig(): Promise<void>;
 declare function buildInformerConfig(): Promise<void>;
 declare function buildPresenterConfig(): Promise<void>;
 declare function bumpVersion(): Promise<void>;
+declare function echoError(message: string): void;
 declare function sendDeploymentNotice(): Promise<void>;
 declare function syncWithGitHub(): Promise<void>;
 declare function uploadDirectoryToR2(sourceDirectory: string, uploadDirectory: string): Promise<void>;
 declare function uploadModuleConfig(): Promise<void>;
 declare function uploadModuleToR2(fromPath: string, toPath: string): Promise<void>;
-export { buildConfig, buildConnectorConfig, buildContextConfig, buildInformerConfig, buildPresenterConfig, buildPublicDirectoryIndex, bumpVersion, sendDeploymentNotice, syncWithGitHub, uploadDirectoryToR2, uploadModuleConfig, uploadModuleToR2 };
+export { buildConfig, buildConnectorConfig, buildContextConfig, buildInformerConfig, buildPresenterConfig, buildPublicDirectoryIndex, bumpVersion, echoError, sendDeploymentNotice, syncWithGitHub, uploadDirectoryToR2, uploadModuleConfig, uploadModuleToR2 };
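The updated declarations add `echoError(message: string): void` alongside the existing async helpers. A hypothetical consumer script using the new surface might look like the sketch below; it assumes the package's entry point resolves to the dist build, and the upload arguments are placeholders only.

// Hypothetical consumer of the updated API surface, including the new echoError helper.
import { buildConnectorConfig, echoError, uploadModuleToR2 } from "@datapos/datapos-development";

async function releaseConnector(): Promise<void> {
    try {
        await buildConnectorConfig();
        // Placeholder arguments: uploadModuleToR2(fromPath, toPath) per the declarations above.
        await uploadModuleToR2("my-connector.es.js", "example-bucket/my-connector.es.js");
    } catch (error) {
        echoError(`Release step failed. ${String(error)}`);
    }
}

releaseConnector();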
package/package.json CHANGED

@@ -3,7 +3,7 @@
     "license": "MIT",
     "private": false,
     "type": "module",
-    "version": "0.3.
+    "version": "0.3.62",
     "files": [
         "dist"
     ],

@@ -37,16 +37,16 @@
     "scripts": {
         "audit": "npm audit",
         "build": "vite build",
-        "
-        "bumpVersion": "node -e \"import('./dist/datapos-development.es.js').then(m => m.bumpVersion())\"",
+        "bump:version": "node -e \"import('./dist/datapos-development.es.js').then(m => m.bumpVersion())\"",
         "check": "npm outdated; npm-check-updates -i && retire",
         "document": "license-report --only=prod,peer > LICENSES.json && license-report-check --source ./LICENSES.json --allowed 'MIT' --allowed 'n/a' --allowed 'Apache-2.0' --output=table",
         "format": "prettier --write src/",
         "lint": "eslint .",
-        "
-        "release": "npm run
-        "
-        "test": "echo \"
-        "
+        "publish:toNPM": "npm publish --access public",
+        "release": "npm run sync:withGitHub && npm run build && npm run publish:toNPM",
+        "sync:withGitHub": "npm run bump:version && node -e \"import('./dist/datapos-development.es.js').then(m => m.syncWithGitHub())\"",
+        "test": "bash -c 'echo \"❌ Test script not implemented.\"'",
+        "update:dataPosDeps": "npm run _update:developDeps",
+        "_update:developDeps": "npm install --save-dev @datapos/datapos-development@latest"
     }
 }
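The renamed scripts chain together: `release` runs `sync:withGitHub`, then `build`, then `publish:toNPM`, and `sync:withGitHub` itself runs `bump:version` before calling `syncWithGitHub()` from the built module. The sketch below is a rough programmatic equivalent of that script's JavaScript portion; the real scripts use `node -e` one-liners against the dist file.

// Equivalent of "sync:withGitHub": bump the patch version, then commit and push via git.
// Assumes it is run from the package root after the dist build exists.
import { bumpVersion, syncWithGitHub } from "./dist/datapos-development.es.js";

await bumpVersion();    // "bump:version" script
await syncWithGitHub(); // git add ., git commit -m "v<version>", git push origin main:main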