@datapos/datapos-development 0.3.61 → 0.3.63

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
1
- import { exec as w } from "child_process";
1
+ import { exec as m } from "child_process";
2
2
  import { promises as n } from "fs";
3
- import { promisify as m } from "util";
3
+ import { promisify as w } from "util";
4
4
  const y = "useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict";
5
5
  let b = (o = 21) => {
6
6
  let e = "", i = crypto.getRandomValues(new Uint8Array(o |= 0));
@@ -8,174 +8,176 @@ let b = (o = 21) => {
8
8
  e += y[i[o] & 63];
9
9
  return e;
10
10
  };
11
- const v = ["createObject", "dropObject", "removeRecords", "upsertRecords"], h = ["findObject", "getRecord", "listNodes", "previewObject", "retrieveRecords"], u = m(w);
11
+ const h = ["createObject", "dropObject", "removeRecords", "upsertRecords"], v = ["findObject", "getRecord", "listNodes", "previewObject", "retrieveRecords"], u = w(m);
12
12
  async function $() {
13
13
  try {
14
- console.log("🚀 Building configuration...");
14
+ console.info("🚀 Building configuration...");
15
15
  const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8"));
16
- o.name && (e.id = o.name), o.version && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.log("✅ Configuration built.");
16
+ o.name && (e.id = o.name), o.version && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.info("✅ Configuration built.");
17
17
  } catch (o) {
18
- console.warn("❌ Error building configuration.", o);
18
+ console.error("❌ Error building configuration.", o);
19
19
  }
20
20
  }
21
21
  async function J(o) {
22
22
  try {
23
- console.log(`🚀 Building public directory index for identifier '${o}'...`);
23
+ console.info(`🚀 Building public directory index for identifier '${o}'...`);
24
24
  const e = {};
25
- async function i(r, t) {
26
- console.log(`âš™ī¸ Processing directory '${r}'...`);
27
- const d = [], l = r.substring(`public/${o}`.length);
28
- e[l] = d;
29
- for (const a of t) {
30
- const c = `${r}/${a}`;
25
+ async function i(t, r) {
26
+ console.info(`âš™ī¸ Processing directory '${t}'...`);
27
+ const f = [], l = t.substring(`public/${o}`.length);
28
+ e[l] = f;
29
+ for (const a of r) {
30
+ const c = `${t}/${a}`;
31
31
  try {
32
- const g = await n.stat(c);
33
- if (g.isDirectory()) {
34
- const p = await n.readdir(c), f = { childCount: p.length, name: `${a}`, typeId: "folder" };
35
- d.push(f), await i(c, p);
32
+ const d = await n.stat(c);
33
+ if (d.isDirectory()) {
34
+ const p = await n.readdir(c), g = { childCount: p.length, name: `${a}`, typeId: "folder" };
35
+ f.push(g), await i(c, p);
36
36
  } else {
37
- const p = { id: b(), lastModifiedAt: g.mtimeMs, name: a, size: g.size, typeId: "object" };
38
- d.push(p);
37
+ const p = { id: b(), lastModifiedAt: d.mtimeMs, name: a, size: d.size, typeId: "object" };
38
+ f.push(p);
39
39
  }
40
- } catch (g) {
41
- console.error(`Unable to get information for '${a}' in 'buildPublicDirectoryIndex'.`, g);
40
+ } catch (d) {
41
+ throw new Error(`Unable to get information for '${a}' in 'buildPublicDirectoryIndex'. ${String(d)}`);
42
42
  }
43
43
  }
44
- d.sort((a, c) => {
45
- const g = a.typeId.localeCompare(c.typeId);
46
- return g !== 0 ? g : a.name.localeCompare(c.name);
44
+ f.sort((a, c) => {
45
+ const d = a.typeId.localeCompare(c.typeId);
46
+ return d !== 0 ? d : a.name.localeCompare(c.name);
47
47
  });
48
48
  }
49
49
  const s = await n.readdir(`public/${o}`);
50
- await i(`public/${o}`, s), await n.writeFile(`./public/${o}Index.json`, JSON.stringify(e), "utf8"), console.log("✅ Public directory index built.");
50
+ await i(`public/${o}`, s), await n.writeFile(`./public/${o}Index.json`, JSON.stringify(e), "utf8"), console.info("✅ Public directory index built.");
51
51
  } catch (e) {
52
- console.warn("❌ Error building public directory index.", e);
52
+ console.error("❌ Error building public directory index.", e);
53
53
  }
54
54
  }
55
55
  async function j() {
56
56
  try {
57
- console.log("🚀 Building connector configuration...");
57
+ console.info("🚀 Building connector configuration...");
58
58
  const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8");
59
- let s = !1, r = !1;
60
- const t = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, d = [...i.matchAll(t)].filter((a) => !a[1] && a[2] !== "constructor").map((a) => {
59
+ let s = !1, t = !1;
60
+ const r = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, f = [...i.matchAll(r)].filter((a) => !a[1] && a[2] !== "constructor").map((a) => {
61
61
  const c = a[2];
62
- return s = s || v.includes(c), r = r || h.includes(c), c;
62
+ return s = s || h.includes(c), t = t || v.includes(c), c;
63
63
  });
64
- d.length > 0 ? console.log(`ℹ️ Implements ${d.length} operations.`) : console.log("⚠️ Implements no operations.");
65
- const l = r && s ? "bidirectional" : r ? "source" : s ? "destination" : null;
66
- console.log(l ? `ℹ️ Supports ${l} usage.` : "⚠️ No usage identified."), o.name && (e.id = o.name), e.operations = d, e.usageId = l, o.version && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.log("✅ Connector configuration built.");
64
+ f.length > 0 ? console.info(`ℹ️ Implements ${f.length} operations.`) : console.warn("⚠️ Implements no operations.");
65
+ const l = t && s ? "bidirectional" : t ? "source" : s ? "destination" : null;
66
+ l ? console.info(`ℹ️ Supports ${l} usage.`) : console.warn("⚠️ No usage identified."), o.name && (e.id = o.name), e.operations = f, e.usageId = l, o.version && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.info("✅ Connector configuration built.");
67
67
  } catch (o) {
68
- console.warn("❌ Error building connector configuration.", o);
68
+ console.error("❌ Error building connector configuration.", o);
69
69
  }
70
70
  }
71
71
  async function x() {
72
72
  try {
73
- console.log("🚀 Building context configuration...");
74
- const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8"), s = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, r = [...i.matchAll(s)].filter((t) => !t[1] && t[2] !== "constructor").map((t) => t[2]);
75
- o.name && (e.id = o.name), e.operations = r, o.version && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8");
73
+ console.info("🚀 Building context configuration...");
74
+ const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8"), s = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, t = [...i.matchAll(s)].filter((r) => !r[1] && r[2] !== "constructor").map((r) => r[2]);
75
+ o.name && (e.id = o.name), e.operations = t, o.version && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.info("✅ Context configuration built.");
76
76
  } catch (o) {
77
- console.warn("❌ Error building context configuration.", o);
77
+ console.error("❌ Error building context configuration.", o);
78
78
  }
79
79
  }
80
80
  async function F() {
81
81
  try {
82
- console.log("🚀 Building informer configuration...");
83
- const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8"), s = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, r = [...i.matchAll(s)].filter((t) => !t[1] && t[2] !== "constructor").map((t) => t[2]);
84
- o.name && (e.id = o.name), e.operations = r, o.version && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8");
82
+ console.info("🚀 Building informer configuration...");
83
+ const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8"), s = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, t = [...i.matchAll(s)].filter((r) => !r[1] && r[2] !== "constructor").map((r) => r[2]);
84
+ o.name && (e.id = o.name), e.operations = t, o.version && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.info("✅ Informer configuration built.");
85
85
  } catch (o) {
86
- console.warn("❌ Error building informer configuration.", o);
86
+ console.error("❌ Error building informer configuration.", o);
87
87
  }
88
88
  }
89
89
  async function C() {
90
90
  try {
91
- console.log("🚀 Building presenter configuration...");
92
- const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8"), s = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, r = [...i.matchAll(s)].filter((t) => !t[1] && t[2] !== "constructor").map((t) => t[2]);
93
- o.name && (e.id = o.name), e.operations = r, o.version && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8");
91
+ console.info("🚀 Building presenter configuration...");
92
+ const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8"), s = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, t = [...i.matchAll(s)].filter((r) => !r[1] && r[2] !== "constructor").map((r) => r[2]);
93
+ o.name && (e.id = o.name), e.operations = t, o.version && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.info("✅ Presenter configuration built.");
94
94
  } catch (o) {
95
- console.warn("❌ Error building context configuration.", o);
95
+ console.error("❌ Error building context configuration.", o);
96
96
  }
97
97
  }
98
98
  async function k() {
99
99
  try {
100
- console.log("🚀 Bumping version...");
100
+ console.info("🚀 Bumping version...");
101
101
  const o = JSON.parse(await n.readFile("package.json", "utf8"));
102
102
  if (o.version) {
103
103
  const e = o.version, i = o.version.split(".");
104
- o.version = `${i[0]}.${i[1]}.${Number(i[2]) + 1}`, await n.writeFile("package.json", JSON.stringify(o, void 0, 4), "utf8"), console.log(`✅ Version bumped from ${e} to ${o.version}.`);
104
+ o.version = `${i[0]}.${i[1]}.${Number(i[2]) + 1}`, await n.writeFile("package.json", JSON.stringify(o, void 0, 4), "utf8"), console.info(`✅ Version bumped from ${e} to ${o.version}.`);
105
105
  } else
106
- o.version = "0.0.001", await n.writeFile("package.json", JSON.stringify(o, void 0, 4), "utf8"), console.log(`⚠️ Version initialised to ${o.version}.`);
106
+ o.version = "0.0.001", await n.writeFile("package.json", JSON.stringify(o, void 0, 4), "utf8"), console.warn(`⚠️ Version initialised to ${o.version}.`);
107
107
  } catch (o) {
108
- console.warn("❌ Error bumping package version.", o);
108
+ console.error("❌ Error bumping package version.", o);
109
109
  }
110
110
  }
111
- async function E() {
111
+ function E(o) {
112
+ console.error(`❌ ${o} script not implemented.`);
113
+ }
114
+ async function I() {
112
115
  try {
113
- console.log("🚀 Sending deployment notice...");
116
+ console.info("🚀 Sending deployment notice...");
114
117
  const o = JSON.parse(await n.readFile("config.json", "utf8")), e = {
115
118
  body: JSON.stringify(o),
116
119
  headers: { "Content-Type": "application/json" },
117
120
  method: "PUT"
118
121
  }, i = await fetch(`https://api.datapos.app/states/${o.id}`, e);
119
- if (!i.ok)
120
- throw console.log("❌", await i.text()), new Error("Fetch error.");
121
- console.log("✅ Deployment notice sent.");
122
+ if (!i.ok) throw new Error(await i.text());
123
+ console.info("✅ Deployment notice sent.");
122
124
  } catch (o) {
123
- console.warn("❌ Error sending deployment notice.", o);
125
+ console.error("❌ Error sending deployment notice.", o);
124
126
  }
125
127
  }
126
- async function I() {
128
+ async function R() {
127
129
  try {
128
- console.log("🚀 Synchronising with GitHub....");
130
+ console.info("🚀 Synchronising with GitHub....");
129
131
  const o = JSON.parse(await n.readFile("package.json", "utf8"));
130
- await u("git add ."), await u(`git commit -m "v${o.version}"`), await u("git push origin main:main"), console.log(`✅ Synchronised version ${o.version} with GitHub.`);
132
+ await u("git add ."), await u(`git commit -m "v${o.version}"`), await u("git push origin main:main"), console.info(`✅ Synchronised version ${o.version} with GitHub.`);
131
133
  } catch (o) {
132
- console.warn("❌ Error synchronising with GitHub.", o);
134
+ console.error("❌ Error synchronising with GitHub.", o);
133
135
  }
134
136
  }
135
- async function R(o, e) {
137
+ async function A(o, e) {
136
138
  try {
137
- console.log("🚀 Uploading directory to R2....");
138
- async function i(r, t, d) {
139
- for (const l of d) {
140
- const a = `${r}/${l}`, c = `${t}/${l}`;
139
+ console.info("🚀 Uploading directory to R2....");
140
+ async function i(t, r, f) {
141
+ for (const l of f) {
142
+ const a = `${t}/${l}`, c = `${r}/${l}`;
141
143
  if ((await n.stat(a)).isDirectory()) {
142
144
  const p = await n.readdir(a);
143
145
  await i(a, c, p);
144
146
  } else {
145
- const p = `wrangler r2 object put "datapos-sample-data-eu/${t}/${l}" --file="${r}/${l}" --jurisdiction=eu --remote`, f = await u(p);
146
- if (console.log(`⚙️ Uploading '${r}/${l}'.`), f.stderr)
147
- throw console.log("❌", f.stderr), new Error("Upload error.");
147
+ console.info(`âš™ī¸ Uploading '${t}/${l}'.`);
148
+ const p = `wrangler r2 object put "datapos-sample-data-eu/${r}/${l}" --file="${t}/${l}" --jurisdiction=eu --remote`, g = await u(p);
149
+ if (g.stderr) throw new Error(g.stderr);
148
150
  }
149
151
  }
150
152
  }
151
153
  const s = await n.readdir(`${o}/${e}/`);
152
- await i(`${o}/${e}`, e, s), console.log("✅ Directory uploaded to R2.");
154
+ await i(`${o}/${e}`, e, s), console.info("✅ Directory uploaded to R2.");
153
155
  } catch (i) {
154
- console.warn("❌ Error uploading directory to R2.", i);
156
+ console.error("❌ Error uploading directory to R2.", i);
155
157
  }
156
158
  }
157
- async function A() {
159
+ async function P() {
158
160
  try {
159
- console.log("🚀 Uploading module configuration....");
161
+ console.info("🚀 Uploading module configuration....");
160
162
  const o = JSON.parse(await n.readFile("config.json", "utf8")), e = o.id, i = {
161
163
  body: JSON.stringify(o),
162
164
  headers: { "Content-Type": "application/json" },
163
165
  method: "PUT"
164
166
  }, s = await fetch(`https://api.datapos.app/states/${e}`, i);
165
- s.ok || console.log(await s.text()), console.log("✅ Module configuration uploaded.");
167
+ if (!s.ok) throw new Error(await s.text());
168
+ console.info("✅ Module configuration uploaded.");
166
169
  } catch (o) {
167
- console.warn("❌ Error uploading module configuration.", o);
170
+ console.error("❌ Error uploading module configuration.", o);
168
171
  }
169
172
  }
170
- async function P(o, e) {
173
+ async function U(o, e) {
171
174
  try {
172
- console.log("🚀 Uploading module to R2....");
173
- const i = JSON.parse(await n.readFile("package.json", "utf8")), s = e.replace(/^(.*?\.)/, `$1v${i.version}.`), { stdout: r, stderr: t } = await u(
174
- `wrangler r2 object put ${s} --file=dist/${o} --content-type application/javascript --jurisdiction=eu --remote`
175
- );
176
- r && console.log(r), t && console.error(t), console.log("✅ Module uploaded to R2.");
175
+ console.info("🚀 Uploading module to R2....");
176
+ const i = JSON.parse(await n.readFile("package.json", "utf8")), s = e.replace(/^(.*?\.)/, `$1v${i.version}.`), { stderr: t } = await u(`wrangler r2 object put ${s} --file=dist/${o} --content-type application/javascript --jurisdiction=eu --remote`);
177
+ if (t) throw new Error(t);
178
+ console.info("✅ Module uploaded to R2.");
177
179
  } catch (i) {
178
- console.warn("❌ Error uploading module to R2.", i);
180
+ console.error("❌ Error uploading module to R2.", i);
179
181
  }
180
182
  }
181
183
  export {
@@ -186,9 +188,10 @@ export {
186
188
  C as buildPresenterConfig,
187
189
  J as buildPublicDirectoryIndex,
188
190
  k as bumpVersion,
189
- E as sendDeploymentNotice,
190
- I as syncWithGitHub,
191
- R as uploadDirectoryToR2,
192
- A as uploadModuleConfig,
193
- P as uploadModuleToR2
191
+ E as echoScriptNotImplemented,
192
+ I as sendDeploymentNotice,
193
+ R as syncWithGitHub,
194
+ A as uploadDirectoryToR2,
195
+ P as uploadModuleConfig,
196
+ U as uploadModuleToR2
194
197
  };
@@ -8,9 +8,10 @@ declare function buildContextConfig(): Promise<void>;
8
8
  declare function buildInformerConfig(): Promise<void>;
9
9
  declare function buildPresenterConfig(): Promise<void>;
10
10
  declare function bumpVersion(): Promise<void>;
11
+ declare function echoScriptNotImplemented(name: string): void;
11
12
  declare function sendDeploymentNotice(): Promise<void>;
12
13
  declare function syncWithGitHub(): Promise<void>;
13
14
  declare function uploadDirectoryToR2(sourceDirectory: string, uploadDirectory: string): Promise<void>;
14
15
  declare function uploadModuleConfig(): Promise<void>;
15
16
  declare function uploadModuleToR2(fromPath: string, toPath: string): Promise<void>;
16
- export { buildConfig, buildConnectorConfig, buildContextConfig, buildInformerConfig, buildPresenterConfig, buildPublicDirectoryIndex, bumpVersion, sendDeploymentNotice, syncWithGitHub, uploadDirectoryToR2, uploadModuleConfig, uploadModuleToR2 };
17
+ export { buildConfig, buildConnectorConfig, buildContextConfig, buildInformerConfig, buildPresenterConfig, buildPublicDirectoryIndex, bumpVersion, echoScriptNotImplemented, sendDeploymentNotice, syncWithGitHub, uploadDirectoryToR2, uploadModuleConfig, uploadModuleToR2 };
package/package.json CHANGED
@@ -3,7 +3,7 @@
3
3
  "license": "MIT",
4
4
  "private": false,
5
5
  "type": "module",
6
- "version": "0.3.61",
6
+ "version": "0.3.63",
7
7
  "files": [
8
8
  "dist"
9
9
  ],
@@ -37,16 +37,16 @@
37
37
  "scripts": {
38
38
  "audit": "npm audit",
39
39
  "build": "vite build",
40
- "buildConfig": "echo \"***** BUILD CONFIG SCRIPT NOT IMPLEMENTED. *****\"",
41
- "bumpVersion": "node -e \"import('./dist/datapos-development.es.js').then(m => m.bumpVersion())\"",
40
+ "bump:version": "node -e \"import('./dist/datapos-development.es.js').then(m => m.bumpVersion())\"",
42
41
  "check": "npm outdated; npm-check-updates -i && retire",
43
42
  "document": "license-report --only=prod,peer > LICENSES.json && license-report-check --source ./LICENSES.json --allowed 'MIT' --allowed 'n/a' --allowed 'Apache-2.0' --output=table",
44
43
  "format": "prettier --write src/",
45
44
  "lint": "eslint .",
46
- "publishToNPM": "npm publish --access public",
47
- "release": "npm run syncWithGitHub && npm run build && npm run publishToNPM",
48
- "syncWithGitHub": "npm run bumpVersion && node -e \"import('./dist/datapos-development.es.js').then(m => m.syncWithGitHub())\"",
49
- "test": "echo \"***** TEST SCRIPT NOT IMPLEMENTED. *****\"",
50
- "updateDependencies": "npm install -D @datapos/datapos-shared@latest"
45
+ "publish:toNPM": "npm publish --access public",
46
+ "release": "npm run sync:withGitHub && npm run build && npm run publish:toNPM",
47
+ "sync:withGitHub": "npm run bump:version && node -e \"import('./dist/datapos-development.es.js').then(m => m.syncWithGitHub())\"",
48
+ "test": "node -e \"import('./dist/datapos-development.es.js').then(m => m.echoScriptNotImplemented('Test'))\"",
49
+ "update:dataPosDeps": "npm run _update:developDeps",
50
+ "_update:developDeps": "npm install --save-dev @datapos/datapos-development@latest"
51
51
  }
52
52
  }