@datapos/datapos-development 0.3.96 → 0.3.102

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,63 +1,63 @@
1
- import { exec as m } from "child_process";
2
- import { promises as n } from "fs";
1
+ import { exec as m } from "node:child_process";
2
+ import { promises as e } from "node:fs";
3
3
  import { nanoid as w } from "nanoid";
4
- import { promisify as y } from "util";
5
- const h = ["createObject", "dropObject", "removeRecords", "upsertRecords"], $ = ["findObject", "getRecord", "listNodes", "previewObject", "retrieveRecords"], g = y(m);
4
+ import { promisify as y } from "node:util";
5
+ const h = ["createObject", "dropObject", "removeRecords", "upsertRecords"], b = ["findObject", "getRecord", "listNodes", "previewObject", "retrieveRecords"], g = y(m);
6
6
  async function S() {
7
7
  try {
8
8
  console.info("🚀 Building configuration...");
9
- const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8"));
10
- o.name && (e.id = o.name.replace("@datapos/", "").replace("@data-positioning/", "")), o.version && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.info("✅ Configuration built.");
9
+ const o = JSON.parse(await e.readFile("package.json", "utf8")), n = JSON.parse(await e.readFile("config.json", "utf8"));
10
+ o.name != null && (n.id = o.name.replace("@datapos/", "").replace("@data-positioning/", "")), o.version != null && (n.version = o.version), await e.writeFile("config.json", JSON.stringify(n, void 0, 4), "utf8"), console.info("✅ Configuration built.");
11
11
  } catch (o) {
12
12
  console.error("❌ Error building configuration.", o);
13
13
  }
14
14
  }
15
- async function J(o) {
15
+ async function O(o) {
16
16
  try {
17
17
  console.info(`🚀 Building public directory index for identifier '${o}'...`);
18
- const e = {};
19
- async function i(t, r) {
20
- console.info(`âš™ī¸ Processing directory '${t}'...`);
21
- const d = [], s = t.substring(`public/${o}`.length);
22
- e[s] = d;
23
- for (const a of r) {
24
- const l = `${t}/${a}`;
18
+ const n = {};
19
+ async function t(r, s) {
20
+ console.info(`âš™ī¸ Processing directory '${r}'...`);
21
+ const d = [], a = r.substring(`public/${o}`.length);
22
+ n[a] = d;
23
+ for (const c of s) {
24
+ const l = `${r}/${c}`;
25
25
  try {
26
- const f = await n.stat(l);
26
+ const f = await e.stat(l);
27
27
  if (f.isDirectory()) {
28
- const p = await n.readdir(l), u = { childCount: p.length, name: `${a}`, typeId: "folder" };
29
- d.push(u), await i(l, p);
28
+ const p = await e.readdir(l), u = { childCount: p.length, name: `${c}`, typeId: "folder" };
29
+ d.push(u), await t(l, p);
30
30
  } else {
31
- const p = { id: w(), lastModifiedAt: f.mtimeMs, name: a, size: f.size, typeId: "object" };
31
+ const p = { id: w(), lastModifiedAt: f.mtimeMs, name: c, size: f.size, typeId: "object" };
32
32
  d.push(p);
33
33
  }
34
34
  } catch (f) {
35
- throw new Error(`Unable to get information for '${a}' in 'buildPublicDirectoryIndex'. ${String(f)}`);
35
+ throw new Error(`Unable to get information for '${c}' in 'buildPublicDirectoryIndex'. ${String(f)}`);
36
36
  }
37
37
  }
38
- d.sort((a, l) => {
39
- const f = a.typeId.localeCompare(l.typeId);
40
- return f !== 0 ? f : a.name.localeCompare(l.name);
38
+ d.sort((c, l) => {
39
+ const f = c.typeId.localeCompare(l.typeId);
40
+ return f === 0 ? c.name.localeCompare(l.name) : f;
41
41
  });
42
42
  }
43
- const c = await n.readdir(`public/${o}`);
44
- await i(`public/${o}`, c), await n.writeFile(`./public/${o}Index.json`, JSON.stringify(e), "utf8"), console.info("✅ Public directory index built.");
45
- } catch (e) {
46
- console.error("❌ Error building public directory index.", e);
43
+ const i = await e.readdir(`public/${o}`);
44
+ await t(`public/${o}`, i), await e.writeFile(`./public/${o}Index.json`, JSON.stringify(n), "utf8"), console.info("✅ Public directory index built.");
45
+ } catch (n) {
46
+ console.error("❌ Error building public directory index.", n);
47
47
  }
48
48
  }
49
- async function j() {
49
+ async function J() {
50
50
  try {
51
51
  console.info("🚀 Building connector configuration...");
52
- const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8");
53
- let c = !1, t = !1;
54
- const r = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, d = [...i.matchAll(r)].filter((a) => !a[1] && a[2] !== "constructor").map((a) => {
55
- const l = a[2];
56
- return c = c || h.includes(l), t = t || $.includes(l), l;
52
+ const o = JSON.parse(await e.readFile("package.json", "utf8")), n = JSON.parse(await e.readFile("config.json", "utf8")), t = await e.readFile("src/index.ts", "utf8");
53
+ let i = !1, r = !1;
54
+ const s = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, d = [...t.matchAll(s)].filter((c) => !c[1] && c[2] !== "constructor").map((c) => {
55
+ const l = c[2];
56
+ return i = i || h.includes(l), r = r || b.includes(l), l;
57
57
  });
58
58
  d.length > 0 ? console.info(`ℹ️ Implements ${d.length} operations.`) : console.warn("⚠️ Implements no operations.");
59
- const s = t && c ? "bidirectional" : t ? "source" : c ? "destination" : "unknown";
60
- s && console.info(`ℹ️ Supports ${s} usage.`), o.name && (e.id = o.name), e.operations = d, e.usageId = s, o.version && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.info("✅ Connector configuration built.");
59
+ const a = r && i ? "bidirectional" : r ? "source" : i ? "destination" : "unknown";
60
+ a && console.info(`ℹ️ Supports ${a} usage.`), o.name != null && (n.id = o.name), n.operations = d, n.usageId = a, o.version != null && (n.version = o.version), await e.writeFile("config.json", JSON.stringify(n, void 0, 4), "utf8"), console.info("✅ Connector configuration built.");
61
61
  } catch (o) {
62
62
  console.error("❌ Error building connector configuration.", o);
63
63
  }
@@ -65,46 +65,59 @@ async function j() {
65
65
  async function x() {
66
66
  try {
67
67
  console.info("🚀 Building context configuration...");
68
- const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8"), c = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, t = [...i.matchAll(c)].filter((r) => !r[1] && r[2] !== "constructor").map((r) => r[2]);
69
- o.name && (e.id = o.name), e.operations = t, o.version && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.info("✅ Context configuration built.");
68
+ const o = JSON.parse(await e.readFile("package.json", "utf8")), n = JSON.parse(await e.readFile("config.json", "utf8")), t = await e.readFile("src/index.ts", "utf8"), i = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, r = [...t.matchAll(i)].filter((s) => !s[1] && s[2] !== "constructor").map((s) => s[2]);
69
+ o.name != null && (n.id = o.name), n.operations = r, o.version != null && (n.version = o.version), await e.writeFile("config.json", JSON.stringify(n, void 0, 4), "utf8"), console.info("✅ Context configuration built.");
70
70
  } catch (o) {
71
71
  console.error("❌ Error building context configuration.", o);
72
72
  }
73
73
  }
74
- async function F() {
74
+ async function j() {
75
75
  try {
76
76
  console.info("🚀 Building presenter configuration...");
77
- const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8"), c = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, t = [...i.matchAll(c)].filter((r) => !r[1] && r[2] !== "constructor").map((r) => r[2]);
78
- o.name && (e.id = o.name), e.operations = t, o.version && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.info("✅ Presenter configuration built.");
77
+ const o = JSON.parse(await e.readFile("package.json", "utf8")), n = JSON.parse(await e.readFile("config.json", "utf8")), t = await e.readFile("src/index.ts", "utf8"), i = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, r = [...t.matchAll(i)].filter((s) => !s[1] && s[2] !== "constructor").map((s) => s[2]);
78
+ o.name != null && (n.id = o.name), n.operations = r, o.version != null && (n.version = o.version), await e.writeFile("config.json", JSON.stringify(n, void 0, 4), "utf8"), console.info("✅ Presenter configuration built.");
79
79
  } catch (o) {
80
80
  console.error("❌ Error building context configuration.", o);
81
81
  }
82
82
  }
83
- async function k() {
83
+ async function C() {
84
84
  try {
85
85
  console.info("🚀 Bumping version...");
86
- const o = JSON.parse(await n.readFile("package.json", "utf8"));
87
- if (o.version) {
88
- const e = o.version, i = o.version.split(".");
89
- o.version = `${i[0]}.${i[1]}.${Number(i[2]) + 1}`, await n.writeFile("package.json", JSON.stringify(o, void 0, 4), "utf8"), console.info(`✅ Version bumped from ${e} to ${o.version}.`);
86
+ const o = JSON.parse(await e.readFile("package.json", "utf8"));
87
+ if (o.version != null) {
88
+ const n = o.version, t = o.version.split(".");
89
+ o.version = `${t[0]}.${t[1]}.${Number(t[2]) + 1}`, await e.writeFile("package.json", JSON.stringify(o, void 0, 4), "utf8"), console.info(`✅ Version bumped from ${n} to ${o.version}.`);
90
90
  } else
91
- o.version = "0.0.001", await n.writeFile("package.json", JSON.stringify(o, void 0, 4), "utf8"), console.warn(`⚠️ Version initialised to ${o.version}.`);
91
+ o.version = "0.0.001", await e.writeFile("package.json", JSON.stringify(o, void 0, 4), "utf8"), console.warn(`⚠️ Version initialised to ${o.version}.`);
92
92
  } catch (o) {
93
93
  console.error("❌ Error bumping package version.", o);
94
94
  }
95
95
  }
96
- function C(o) {
96
+ function F(o) {
97
97
  console.error(`❌ ${o} script not implemented.`);
98
98
  }
99
- async function E() {
99
+ async function R() {
100
+ const o = "<!-- DEPENDENCY_LICENSES_START -->", n = "<!-- DEPENDENCY_LICENSES_END -->";
101
+ try {
102
+ const t = (await e.readFile("./licenses.md", "utf8")).trim(), i = await e.readFile("./README.md", "utf8"), r = i.indexOf(o), s = i.indexOf(n);
103
+ (r === -1 || s === -1) && (console.error("Error: Markers not found in README.md"), process.exit(1));
104
+ const d = i.substring(0, r + o.length) + `
105
+ ` + t + `
106
+ ` + i.substring(s);
107
+ await e.writeFile("README.md", d, "utf8"), console.log("✓ README.md updated with license information");
108
+ } catch (t) {
109
+ console.error("Error updating README:", t), process.exit(1);
110
+ }
111
+ }
112
+ async function k() {
100
113
  try {
101
114
  console.info("🚀 Sending deployment notice...");
102
- const o = JSON.parse(await n.readFile("config.json", "utf8")), e = {
115
+ const o = JSON.parse(await e.readFile("config.json", "utf8")), n = {
103
116
  body: JSON.stringify(o),
104
117
  headers: { "Content-Type": "application/json" },
105
118
  method: "PUT"
106
- }, i = await fetch(`https://api.datapos.app/states/${o.id}`, e);
107
- if (!i.ok) throw new Error(await i.text());
119
+ }, t = await fetch(`https://api.datapos.app/states/${o.id}`, n);
120
+ if (!t.ok) throw new Error(await t.text());
108
121
  console.info("✅ Deployment notice sent.");
109
122
  } catch (o) {
110
123
  console.error("❌ Error sending deployment notice.", o);
@@ -113,80 +126,81 @@ async function E() {
113
126
  async function I() {
114
127
  try {
115
128
  console.info("🚀 Synchronising with GitHub....");
116
- const o = JSON.parse(await n.readFile("package.json", "utf8"));
129
+ const o = JSON.parse(await e.readFile("package.json", "utf8"));
117
130
  await g("git add ."), await g(`git commit -m "v${o.version}"`), await g("git push origin main:main"), console.info(`✅ Synchronised version ${o.version} with GitHub.`);
118
131
  } catch (o) {
119
132
  console.error("❌ Error synchronising with GitHub.", o);
120
133
  }
121
134
  }
122
- async function R(o, e) {
135
+ async function D(o, n) {
123
136
  try {
124
137
  console.info("🚀 Uploading directory to R2....");
125
- async function i(t, r, d) {
126
- for (const s of d) {
127
- const a = `${t}/${s}`, l = `${r}/${s}`;
128
- if ((await n.stat(a)).isDirectory()) {
129
- const p = await n.readdir(a);
130
- await i(a, l, p);
138
+ async function t(r, s, d) {
139
+ for (const a of d) {
140
+ const c = `${r}/${a}`, l = `${s}/${a}`;
141
+ if ((await e.stat(c)).isDirectory()) {
142
+ const p = await e.readdir(c);
143
+ await t(c, l, p);
131
144
  } else {
132
- console.info(`âš™ī¸ Uploading '${t}/${s}'...`);
133
- const p = `wrangler r2 object put "datapos-sample-data-eu/${r}/${s}" --file="${t}/${s}" --jurisdiction=eu --remote`, u = await g(p);
145
+ console.info(`âš™ī¸ Uploading '${r}/${a}'...`);
146
+ const p = `wrangler r2 object put "datapos-sample-data-eu/${s}/${a}" --file="${r}/${a}" --jurisdiction=eu --remote`, u = await g(p);
134
147
  if (u.stderr) throw new Error(u.stderr);
135
148
  }
136
149
  }
137
150
  }
138
- const c = await n.readdir(`${o}/${e}/`);
139
- await i(`${o}/${e}`, e, c), console.info("✅ Directory uploaded to R2.");
140
- } catch (i) {
141
- console.error("❌ Error uploading directory to R2.", i);
151
+ const i = await e.readdir(`${o}/${n}/`);
152
+ await t(`${o}/${n}`, n, i), console.info("✅ Directory uploaded to R2.");
153
+ } catch (t) {
154
+ console.error("❌ Error uploading directory to R2.", t);
142
155
  }
143
156
  }
144
- async function P() {
157
+ async function A() {
145
158
  try {
146
159
  console.info("🚀 Uploading module configuration....");
147
- const o = JSON.parse(await n.readFile("config.json", "utf8")), e = o.id, i = {
160
+ const o = JSON.parse(await e.readFile("config.json", "utf8")), n = o.id, t = {
148
161
  body: JSON.stringify(o),
149
162
  headers: { "Content-Type": "application/json" },
150
163
  method: "PUT"
151
- }, c = await fetch(`https://api.datapos.app/states/${e}`, i);
152
- if (!c.ok) throw new Error(await c.text());
164
+ }, i = await fetch(`https://api.datapos.app/states/${n}`, t);
165
+ if (!i.ok) throw new Error(await i.text());
153
166
  console.info("✅ Module configuration uploaded.");
154
167
  } catch (o) {
155
168
  console.error("❌ Error uploading module configuration.", o);
156
169
  }
157
170
  }
158
- async function T(o) {
171
+ async function P(o) {
159
172
  try {
160
173
  console.info("🚀 Uploading module to R2...");
161
- const i = `v${JSON.parse(await n.readFile("package.json", "utf8")).version}`;
162
- async function c(t, r = "") {
163
- const d = await n.readdir(t, { withFileTypes: !0 });
164
- for (const s of d) {
165
- const a = `${t}/${s.name}`, l = r ? `${r}/${s.name}` : s.name;
166
- if (!s.isDirectory()) {
167
- const f = `${o}_${i}/${l}`.replace(/\\/g, "/"), p = s.name.endsWith(".js") ? "application/javascript" : s.name.endsWith(".css") ? "text/css" : "application/octet-stream";
174
+ const t = `v${JSON.parse(await e.readFile("package.json", "utf8")).version}`;
175
+ async function i(r, s = "") {
176
+ const d = await e.readdir(r, { withFileTypes: !0 });
177
+ for (const a of d) {
178
+ const c = `${r}/${a.name}`, l = s ? `${s}/${a.name}` : a.name;
179
+ if (!a.isDirectory()) {
180
+ const f = `${o}_${t}/${l}`.replace(/\\/g, "/"), p = a.name.endsWith(".js") ? "application/javascript" : a.name.endsWith(".css") ? "text/css" : "application/octet-stream";
168
181
  console.info(`âš™ī¸ Uploading '${l}' → '${f}'...`);
169
- const { stderr: u } = await g(`wrangler r2 object put "${f}" --file="${a}" --content-type ${p} --jurisdiction=eu --remote`);
182
+ const { stderr: u } = await g(`wrangler r2 object put "${f}" --file="${c}" --content-type ${p} --jurisdiction=eu --remote`);
170
183
  if (u) throw new Error(u);
171
184
  }
172
185
  }
173
186
  }
174
- await c("dist"), console.info("✅ Module uploaded to R2.");
175
- } catch (e) {
176
- console.error("❌ Error uploading module to R2.", e);
187
+ await i("dist"), console.info("✅ Module uploaded to R2.");
188
+ } catch (n) {
189
+ console.error("❌ Error uploading module to R2.", n);
177
190
  }
178
191
  }
179
192
  export {
180
193
  S as buildConfig,
181
- j as buildConnectorConfig,
194
+ J as buildConnectorConfig,
182
195
  x as buildContextConfig,
183
- F as buildPresenterConfig,
184
- J as buildPublicDirectoryIndex,
185
- k as bumpVersion,
186
- C as echoScriptNotImplemented,
187
- E as sendDeploymentNotice,
196
+ j as buildPresenterConfig,
197
+ O as buildPublicDirectoryIndex,
198
+ C as bumpVersion,
199
+ F as echoScriptNotImplemented,
200
+ R as insertLicensesIntoReadme,
201
+ k as sendDeploymentNotice,
188
202
  I as syncWithGitHub,
189
- R as uploadDirectoryToR2,
190
- P as uploadModuleConfigToDO,
191
- T as uploadModuleToR2
203
+ D as uploadDirectoryToR2,
204
+ A as uploadModuleConfigToDO,
205
+ P as uploadModuleToR2
192
206
  };
@@ -8,9 +8,10 @@ declare function buildContextConfig(): Promise<void>;
8
8
  declare function buildPresenterConfig(): Promise<void>;
9
9
  declare function bumpVersion(): Promise<void>;
10
10
  declare function echoScriptNotImplemented(name: string): void;
11
+ declare function insertLicensesIntoReadme(): Promise<void>;
11
12
  declare function sendDeploymentNotice(): Promise<void>;
12
13
  declare function syncWithGitHub(): Promise<void>;
13
14
  declare function uploadDirectoryToR2(sourceDirectory: string, uploadDirectory: string): Promise<void>;
14
15
  declare function uploadModuleConfigToDO(): Promise<void>;
15
16
  declare function uploadModuleToR2(uploadDirPath: string): Promise<void>;
16
- export { buildConfig, buildConnectorConfig, buildContextConfig, buildPresenterConfig, buildPublicDirectoryIndex, bumpVersion, echoScriptNotImplemented, sendDeploymentNotice, syncWithGitHub, uploadDirectoryToR2, uploadModuleConfigToDO, uploadModuleToR2 };
17
+ export { buildConfig, buildConnectorConfig, buildContextConfig, buildPresenterConfig, buildPublicDirectoryIndex, bumpVersion, echoScriptNotImplemented, insertLicensesIntoReadme, sendDeploymentNotice, syncWithGitHub, uploadDirectoryToR2, uploadModuleConfigToDO, uploadModuleToR2 };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@datapos/datapos-development",
3
- "version": "0.3.96",
3
+ "version": "0.3.102",
4
4
  "description": "A TypeScript library of utilities for managing the Data Positioning repositories.",
5
5
  "license": "MIT",
6
6
  "author": "Jonathan Terrell <terrell.jm@gmail.com>",