@datapos/datapos-development 0.3.133 → 0.3.135
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -4
- package/dist/datapos-development.es.js +76 -79
- package/package.json +8 -7
package/README.md
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
# Data Positioning Development Library
|
|
2
2
|
|
|
3
3
|
[](https://sonarcloud.io/summary/new_code?id=data-positioning_datapos-development)
|
|
4
|
-
<span><!-- OWASP_BADGE_START -->[](https://data-positioning.github.io/test-lib/dependency-check-reports/dependency-check-report.html)<!-- OWASP_BADGE_END --></span>
|
|
5
5
|
[](https://www.npmjs.com/package/@datapos/datapos-development)
|
|
6
6
|
[](./LICENSE)
|
|
7
7
|
|
|
@@ -62,14 +62,13 @@ The OWASP Dependency Check Report identifies known vulnerabilities in project de
|
|
|
62
62
|
|
|
63
63
|
### Dependency Licenses
|
|
64
64
|
|
|
65
|
-
The following table lists top-level production and peer dependencies
|
|
65
|
+
The following table lists top-level production and peer dependencies. All these dependencies (including transitive ones) have been recursively verified to use Apache-2.0, CC0-1.0, or MIT—commercially friendly licenses with minimal restrictions. Developers cloning this repository should independently verify dev and optional dependencies; users of the published library are covered by these checks.
|
|
66
66
|
|
|
67
67
|
<!-- DEPENDENCY_LICENSES_START -->
|
|
68
|
-
|
|
69
68
|
| Name | Type | Installed | Latest | Latest Modified |
|
|
70
69
|
| :---------------------- | :--: | :-------: | :-----: | :----------------------- |
|
|
71
70
|
| @datapos/datapos-shared | MIT | 0.3.252 | 0.3.252 | 2025-11-25T16:48:28.532Z |
|
|
72
|
-
|
|
71
|
+
| node-pty | MIT | 1.0.0 | 1.0.0 | 2025-11-03T11:45:17.960Z |
|
|
73
72
|
<!-- DEPENDENCY_LICENSES_END -->
|
|
74
73
|
|
|
75
74
|
### Bundle Analysis Report
|
|
@@ -1,63 +1,63 @@
|
|
|
1
1
|
import { exec as b } from "node:child_process";
|
|
2
|
-
import { promises as
|
|
2
|
+
import { promises as n } from "node:fs";
|
|
3
3
|
import { nanoid as $ } from "nanoid";
|
|
4
4
|
import { promisify as O } from "node:util";
|
|
5
5
|
const S = ["createObject", "dropObject", "removeRecords", "upsertRecords"], v = ["findObject", "getRecord", "listNodes", "previewObject", "retrieveRecords"], y = O(b);
|
|
6
6
|
async function j() {
|
|
7
7
|
try {
|
|
8
8
|
console.info("π Building configuration...");
|
|
9
|
-
const e = JSON.parse(await
|
|
10
|
-
e.name != null && (o.id = e.name.replace("@datapos/", "").replace("@data-positioning/", "")), e.version != null && (o.version = e.version), await
|
|
9
|
+
const e = JSON.parse(await n.readFile("package.json", "utf8")), o = JSON.parse(await n.readFile("config.json", "utf8"));
|
|
10
|
+
e.name != null && (o.id = e.name.replace("@datapos/", "").replace("@data-positioning/", "")), e.version != null && (o.version = e.version), await n.writeFile("config.json", JSON.stringify(o, void 0, 4), "utf8"), console.info("β
Configuration built.");
|
|
11
11
|
} catch (e) {
|
|
12
12
|
console.error("β Error building configuration.", e);
|
|
13
13
|
}
|
|
14
14
|
}
|
|
15
|
-
async function
|
|
15
|
+
async function J(e) {
|
|
16
16
|
try {
|
|
17
17
|
console.info(`π Building public directory index for identifier '${e}'...`);
|
|
18
18
|
const o = {};
|
|
19
19
|
async function i(s, r) {
|
|
20
20
|
console.info(`βοΈ Processing directory '${s}'...`);
|
|
21
|
-
const
|
|
22
|
-
o[c] =
|
|
21
|
+
const l = [], c = s.substring(`public/${e}`.length);
|
|
22
|
+
o[c] = l;
|
|
23
23
|
for (const a of r) {
|
|
24
|
-
const
|
|
24
|
+
const d = `${s}/${a}`;
|
|
25
25
|
try {
|
|
26
|
-
const f = await
|
|
26
|
+
const f = await n.stat(d);
|
|
27
27
|
if (f.isDirectory()) {
|
|
28
|
-
const u = await
|
|
29
|
-
|
|
28
|
+
const u = await n.readdir(d), p = { childCount: u.length, name: `${a}`, typeId: "folder" };
|
|
29
|
+
l.push(p), await i(d, u);
|
|
30
30
|
} else {
|
|
31
31
|
const u = { id: $(), lastModifiedAt: f.mtimeMs, name: a, size: f.size, typeId: "object" };
|
|
32
|
-
|
|
32
|
+
l.push(u);
|
|
33
33
|
}
|
|
34
34
|
} catch (f) {
|
|
35
35
|
throw new Error(`Unable to get information for '${a}' in 'buildPublicDirectoryIndex'. ${String(f)}`);
|
|
36
36
|
}
|
|
37
37
|
}
|
|
38
|
-
|
|
39
|
-
const f = a.typeId.localeCompare(
|
|
40
|
-
return f === 0 ? a.name.localeCompare(
|
|
38
|
+
l.sort((a, d) => {
|
|
39
|
+
const f = a.typeId.localeCompare(d.typeId);
|
|
40
|
+
return f === 0 ? a.name.localeCompare(d.name) : f;
|
|
41
41
|
});
|
|
42
42
|
}
|
|
43
|
-
const
|
|
44
|
-
await i(`public/${e}`,
|
|
43
|
+
const t = await n.readdir(`public/${e}`);
|
|
44
|
+
await i(`public/${e}`, t), await n.writeFile(`./public/${e}Index.json`, JSON.stringify(o), "utf8"), console.info("β
Public directory index built.");
|
|
45
45
|
} catch (o) {
|
|
46
46
|
console.error("β Error building public directory index.", o);
|
|
47
47
|
}
|
|
48
48
|
}
|
|
49
|
-
async function
|
|
49
|
+
async function k() {
|
|
50
50
|
try {
|
|
51
51
|
console.info("π Building connector configuration...");
|
|
52
|
-
const e = JSON.parse(await
|
|
53
|
-
let
|
|
54
|
-
const r = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm,
|
|
55
|
-
const
|
|
56
|
-
return
|
|
52
|
+
const e = JSON.parse(await n.readFile("package.json", "utf8")), o = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8");
|
|
53
|
+
let t = !1, s = !1;
|
|
54
|
+
const r = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, l = [...i.matchAll(r)].filter((a) => a[1] == null && a[2] !== "constructor").map((a) => {
|
|
55
|
+
const d = a[2];
|
|
56
|
+
return t = t || S.includes(d), s = s || v.includes(d), d;
|
|
57
57
|
});
|
|
58
|
-
|
|
59
|
-
const c = s &&
|
|
60
|
-
c && console.info(`βΉοΈ Supports ${c} usage.`), e.name != null && (o.id = e.name), o.operations =
|
|
58
|
+
l.length > 0 ? console.info(`βΉοΈ Implements ${l.length} operations.`) : console.warn("β οΈ Implements no operations.");
|
|
59
|
+
const c = s && t ? "bidirectional" : s ? "source" : t ? "destination" : "unknown";
|
|
60
|
+
c && console.info(`βΉοΈ Supports ${c} usage.`), e.name != null && (o.id = e.name), o.operations = l, o.usageId = c, e.version != null && (o.version = e.version), await n.writeFile("config.json", JSON.stringify(o, void 0, 4), "utf8"), console.info("β
Connector configuration built.");
|
|
61
61
|
} catch (e) {
|
|
62
62
|
console.error("β Error building connector configuration.", e);
|
|
63
63
|
}
|
|
@@ -65,8 +65,8 @@ async function J() {
|
|
|
65
65
|
async function F() {
|
|
66
66
|
try {
|
|
67
67
|
console.info("π Building context configuration...");
|
|
68
|
-
const e = JSON.parse(await
|
|
69
|
-
e.name != null && (o.id = e.name), o.operations = s, e.version != null && (o.version = e.version), await
|
|
68
|
+
const e = JSON.parse(await n.readFile("package.json", "utf8")), o = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8"), t = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, s = [...i.matchAll(t)].filter((r) => r[1] == null && r[2] !== "constructor").map((r) => r[2]);
|
|
69
|
+
e.name != null && (o.id = e.name), o.operations = s, e.version != null && (o.version = e.version), await n.writeFile("config.json", JSON.stringify(o, void 0, 4), "utf8"), console.info("β
Context configuration built.");
|
|
70
70
|
} catch (e) {
|
|
71
71
|
console.error("β Error building context configuration.", e);
|
|
72
72
|
}
|
|
@@ -74,8 +74,8 @@ async function F() {
|
|
|
74
74
|
async function R() {
|
|
75
75
|
try {
|
|
76
76
|
console.info("π Building presenter configuration...");
|
|
77
|
-
const e = JSON.parse(await
|
|
78
|
-
e.name != null && (o.id = e.name), o.operations = s, e.version != null && (o.version = e.version), await
|
|
77
|
+
const e = JSON.parse(await n.readFile("package.json", "utf8")), o = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8"), t = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, s = [...i.matchAll(t)].filter((r) => !r[1] && r[2] !== "constructor").map((r) => r[2]);
|
|
78
|
+
e.name != null && (o.id = e.name), o.operations = s, e.version != null && (o.version = e.version), await n.writeFile("config.json", JSON.stringify(o, void 0, 4), "utf8"), console.info("β
Presenter configuration built.");
|
|
79
79
|
} catch (e) {
|
|
80
80
|
console.error("β Error building context configuration.", e);
|
|
81
81
|
}
|
|
@@ -83,12 +83,12 @@ async function R() {
|
|
|
83
83
|
async function A(e = "./") {
|
|
84
84
|
try {
|
|
85
85
|
console.info("π Bumping version...");
|
|
86
|
-
const o = JSON.parse(await
|
|
86
|
+
const o = JSON.parse(await n.readFile(`${e}package.json`, "utf8"));
|
|
87
87
|
if (o.version == null)
|
|
88
|
-
o.version = "0.0.001", await
|
|
88
|
+
o.version = "0.0.001", await n.writeFile(`${e}package.json`, JSON.stringify(o, void 0, 4), "utf8"), console.warn(`β οΈ Version initialised to ${o.version}.`);
|
|
89
89
|
else {
|
|
90
|
-
const i = o.version,
|
|
91
|
-
o.version = `${
|
|
90
|
+
const i = o.version, t = o.version.split(".");
|
|
91
|
+
o.version = `${t[0]}.${t[1]}.${Number(t[2]) + 1}`, await n.writeFile(`${e}package.json`, JSON.stringify(o, void 0, 4), "utf8"), console.info(`β
Version bumped from ${i} to ${o.version}.`);
|
|
92
92
|
}
|
|
93
93
|
} catch (o) {
|
|
94
94
|
console.error("β Error bumping package version.", o);
|
|
@@ -100,12 +100,12 @@ function D(e) {
|
|
|
100
100
|
async function P() {
|
|
101
101
|
const e = "<!-- DEPENDENCY_LICENSES_START -->", o = "<!-- DEPENDENCY_LICENSES_END -->";
|
|
102
102
|
try {
|
|
103
|
-
const i = (await
|
|
103
|
+
const i = (await n.readFile("./licenses.md", "utf8")).trim(), t = await n.readFile("./README.md", "utf8"), s = t.indexOf(e), r = t.indexOf(o);
|
|
104
104
|
(s === -1 || r === -1) && (console.error("β Dependency license markers not found in readme file."), process.exit(1));
|
|
105
|
-
const
|
|
105
|
+
const l = t.substring(0, s + e.length) + `
|
|
106
106
|
` + i + `
|
|
107
|
-
` +
|
|
108
|
-
await
|
|
107
|
+
` + t.substring(r);
|
|
108
|
+
await n.writeFile("README.md", l, "utf8"), console.log("β
Readme file updated with license information");
|
|
109
109
|
} catch (i) {
|
|
110
110
|
console.error("β Error updating readme file.", i), process.exit(1);
|
|
111
111
|
}
|
|
@@ -113,12 +113,12 @@ async function P() {
|
|
|
113
113
|
async function I() {
|
|
114
114
|
const e = "<!-- OWASP_BADGE_START -->", o = "<!-- OWASP_BADGE_END -->";
|
|
115
115
|
try {
|
|
116
|
-
const i = JSON.parse(await
|
|
116
|
+
const i = JSON.parse(await n.readFile("./dependency-check-reports/dependency-check-report.json", "utf-8")), t = { critical: 0, high: 0, moderate: 0, low: 0, unknown: 0 };
|
|
117
117
|
for (const g of i.dependencies)
|
|
118
118
|
if (g.vulnerabilities != null)
|
|
119
119
|
for (const m of g.vulnerabilities) {
|
|
120
120
|
const w = m.severity?.toLowerCase() ?? "unknown";
|
|
121
|
-
w in
|
|
121
|
+
w in t ? t[w]++ : t.unknown++;
|
|
122
122
|
}
|
|
123
123
|
const s = {
|
|
124
124
|
critical: { color: "D32F2F", label: "critical" },
|
|
@@ -126,23 +126,20 @@ async function I() {
|
|
|
126
126
|
moderate: { color: "FBC02D", label: "moderate" },
|
|
127
127
|
low: { color: "6D8C31", label: "low" },
|
|
128
128
|
unknown: { color: "616161", label: "unknown" }
|
|
129
|
-
}, r =
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
);
|
|
133
|
-
const d = JSON.parse(await t.readFile("config.json", "utf8")), c = [];
|
|
134
|
-
if (r === 0)
|
|
135
|
-
c.push(`[](https://data-positioning.github.io/${d.id}/dependency-check-reports/dependency-check-report.html)`);
|
|
129
|
+
}, r = JSON.parse(await n.readFile("config.json", "utf8")), l = [];
|
|
130
|
+
if (Object.values(t).reduce((g, m) => g + m, 0) === 0)
|
|
131
|
+
console.info("β
No vulnerabilities found."), l.push(`[](https://data-positioning.github.io/${r.id}/dependency-check-reports/dependency-check-report.html)`);
|
|
136
132
|
else
|
|
137
|
-
for (const [g, m] of Object.entries(
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
133
|
+
for (const [g, m] of Object.entries(t)) {
|
|
134
|
+
const w = s[g];
|
|
135
|
+
if (console.warn(`β οΈ ${m} ${w.label} vulnerability(ies) found.`), m === 0) continue;
|
|
136
|
+
const h = `https://img.shields.io/badge/OWASP-${m}%20${w.label}-${w.color}`;
|
|
137
|
+
l.push(`[](https://data-positioning.github.io/${r.id}/dependency-check-reports/dependency-check-report.html)`);
|
|
141
138
|
}
|
|
142
|
-
const a = await
|
|
143
|
-
(
|
|
144
|
-
const u =
|
|
145
|
-
await
|
|
139
|
+
const a = await n.readFile("./README.md", "utf8"), d = a.indexOf(e), f = a.indexOf(o);
|
|
140
|
+
(d === -1 || f === -1) && (console.error("β OWASP badge markers not found in README.md."), process.exit(1));
|
|
141
|
+
const u = l.join(" "), p = a.substring(0, d + e.length) + u + a.substring(f);
|
|
142
|
+
await n.writeFile("README.md", p, "utf8"), console.info("β
OWASP dependency check badge(s) inserted into README.md");
|
|
146
143
|
} catch (i) {
|
|
147
144
|
console.error("β Error updating README with OWASP badges:", i), process.exit(1);
|
|
148
145
|
}
|
|
@@ -150,7 +147,7 @@ async function I() {
|
|
|
150
147
|
async function M() {
|
|
151
148
|
try {
|
|
152
149
|
console.info("π Sending deployment notice...");
|
|
153
|
-
const e = JSON.parse(await
|
|
150
|
+
const e = JSON.parse(await n.readFile("config.json", "utf8")), o = {
|
|
154
151
|
body: JSON.stringify(e),
|
|
155
152
|
headers: { "Content-Type": "application/json" },
|
|
156
153
|
method: "PUT"
|
|
@@ -164,7 +161,7 @@ async function M() {
|
|
|
164
161
|
async function T() {
|
|
165
162
|
try {
|
|
166
163
|
console.info("π Synchronising with GitHub....");
|
|
167
|
-
const e = JSON.parse(await
|
|
164
|
+
const e = JSON.parse(await n.readFile("package.json", "utf8"));
|
|
168
165
|
await y("git add ."), await y(`git commit -m "v${e.version}"`), await y("git push origin main:main"), console.info(`β
Synchronised version ${e.version} with GitHub.`);
|
|
169
166
|
} catch (e) {
|
|
170
167
|
console.error("β Error synchronising with GitHub.", e);
|
|
@@ -173,12 +170,12 @@ async function T() {
|
|
|
173
170
|
async function _(e, o) {
|
|
174
171
|
try {
|
|
175
172
|
console.info("π Uploading directory to R2....");
|
|
176
|
-
async function i(s, r,
|
|
177
|
-
for (const c of
|
|
178
|
-
const a = `${s}/${c}`,
|
|
179
|
-
if ((await
|
|
180
|
-
const u = await
|
|
181
|
-
await i(a,
|
|
173
|
+
async function i(s, r, l) {
|
|
174
|
+
for (const c of l) {
|
|
175
|
+
const a = `${s}/${c}`, d = `${r}/${c}`;
|
|
176
|
+
if ((await n.stat(a)).isDirectory()) {
|
|
177
|
+
const u = await n.readdir(a);
|
|
178
|
+
await i(a, d, u);
|
|
182
179
|
} else {
|
|
183
180
|
console.info(`βοΈ Uploading '${s}/${c}'...`);
|
|
184
181
|
const u = `wrangler r2 object put "datapos-sample-data-eu/${r}/${c}" --file="${s}/${c}" --jurisdiction=eu --remote`, p = await y(u);
|
|
@@ -186,8 +183,8 @@ async function _(e, o) {
|
|
|
186
183
|
}
|
|
187
184
|
}
|
|
188
185
|
}
|
|
189
|
-
const
|
|
190
|
-
await i(`${e}/${o}`, o,
|
|
186
|
+
const t = await n.readdir(`${e}/${o}/`);
|
|
187
|
+
await i(`${e}/${o}`, o, t), console.info("β
Directory uploaded to R2.");
|
|
191
188
|
} catch (i) {
|
|
192
189
|
console.error("β Error uploading directory to R2.", i);
|
|
193
190
|
}
|
|
@@ -195,44 +192,44 @@ async function _(e, o) {
|
|
|
195
192
|
async function W() {
|
|
196
193
|
try {
|
|
197
194
|
console.info("π Uploading module configuration....");
|
|
198
|
-
const e = JSON.parse(await
|
|
195
|
+
const e = JSON.parse(await n.readFile("config.json", "utf8")), o = e.id, i = {
|
|
199
196
|
body: JSON.stringify(e),
|
|
200
197
|
headers: { "Content-Type": "application/json" },
|
|
201
198
|
method: "PUT"
|
|
202
|
-
},
|
|
203
|
-
if (!
|
|
199
|
+
}, t = await fetch(`https://api.datapos.app/states/${o}`, i);
|
|
200
|
+
if (!t.ok) throw new Error(await t.text());
|
|
204
201
|
console.info("β
Module configuration uploaded.");
|
|
205
202
|
} catch (e) {
|
|
206
203
|
console.error("β Error uploading module configuration.", e);
|
|
207
204
|
}
|
|
208
205
|
}
|
|
209
|
-
async function
|
|
206
|
+
async function B(e) {
|
|
210
207
|
try {
|
|
211
208
|
console.info("π Uploading module to R2...");
|
|
212
|
-
const i = `v${JSON.parse(await
|
|
213
|
-
async function
|
|
214
|
-
const
|
|
215
|
-
for (const c of
|
|
216
|
-
const a = `${s}/${c.name}`,
|
|
209
|
+
const i = `v${JSON.parse(await n.readFile("package.json", "utf8")).version}`;
|
|
210
|
+
async function t(s, r = "") {
|
|
211
|
+
const l = await n.readdir(s, { withFileTypes: !0 });
|
|
212
|
+
for (const c of l) {
|
|
213
|
+
const a = `${s}/${c.name}`, d = r ? `${r}/${c.name}` : c.name;
|
|
217
214
|
if (!c.isDirectory()) {
|
|
218
|
-
const f = `${e}_${i}/${
|
|
219
|
-
console.info(`βοΈ Uploading '${
|
|
215
|
+
const f = `${e}_${i}/${d}`.replace(/\\/g, "/"), u = c.name.endsWith(".js") ? "application/javascript" : c.name.endsWith(".css") ? "text/css" : "application/octet-stream";
|
|
216
|
+
console.info(`βοΈ Uploading '${d}' β '${f}'...`);
|
|
220
217
|
const { stderr: p } = await y(`wrangler r2 object put "${f}" --file="${a}" --content-type ${u} --jurisdiction=eu --remote`);
|
|
221
218
|
if (p) throw new Error(p);
|
|
222
219
|
}
|
|
223
220
|
}
|
|
224
221
|
}
|
|
225
|
-
await
|
|
222
|
+
await t("dist"), console.info("β
Module uploaded to R2.");
|
|
226
223
|
} catch (o) {
|
|
227
224
|
console.error("β Error uploading module to R2.", o);
|
|
228
225
|
}
|
|
229
226
|
}
|
|
230
227
|
export {
|
|
231
228
|
j as buildConfig,
|
|
232
|
-
|
|
229
|
+
k as buildConnectorConfig,
|
|
233
230
|
F as buildContextConfig,
|
|
234
231
|
R as buildPresenterConfig,
|
|
235
|
-
|
|
232
|
+
J as buildPublicDirectoryIndex,
|
|
236
233
|
A as bumpVersion,
|
|
237
234
|
D as echoScriptNotImplemented,
|
|
238
235
|
P as insertLicensesIntoReadme,
|
|
@@ -241,5 +238,5 @@ export {
|
|
|
241
238
|
T as syncWithGitHub,
|
|
242
239
|
_ as uploadDirectoryToR2,
|
|
243
240
|
W as uploadModuleConfigToDO,
|
|
244
|
-
|
|
241
|
+
B as uploadModuleToR2
|
|
245
242
|
};
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@datapos/datapos-development",
|
|
3
|
-
"version": "0.3.
|
|
3
|
+
"version": "0.3.135",
|
|
4
4
|
"description": "A library of utilities for managing the Data Positioning repositories.",
|
|
5
5
|
"license": "MIT",
|
|
6
6
|
"author": "Jonathan Terrell <terrell.jm@gmail.com>",
|
|
@@ -26,7 +26,8 @@
|
|
|
26
26
|
"dist"
|
|
27
27
|
],
|
|
28
28
|
"dependencies": {
|
|
29
|
-
"@datapos/datapos-shared": "^0.3.252"
|
|
29
|
+
"@datapos/datapos-shared": "^0.3.252",
|
|
30
|
+
"node-pty": "^1.0.0"
|
|
30
31
|
},
|
|
31
32
|
"devDependencies": {
|
|
32
33
|
"@types/node": "^24.10.1",
|
|
@@ -41,8 +42,7 @@
|
|
|
41
42
|
"nanoid": "^5.1.6",
|
|
42
43
|
"npm-check-updates": "^19.1.2",
|
|
43
44
|
"owasp-dependency-check": "^1.0.0",
|
|
44
|
-
"prettier": "^3.
|
|
45
|
-
"retire": "^5.3.0",
|
|
45
|
+
"prettier": "^3.7.1",
|
|
46
46
|
"rollup-plugin-visualizer": "^6.0.5",
|
|
47
47
|
"ts-to-zod": "^5.1.0",
|
|
48
48
|
"type-fest": "^5.2.0",
|
|
@@ -55,7 +55,8 @@
|
|
|
55
55
|
"scripts": {
|
|
56
56
|
"audit": "npm audit",
|
|
57
57
|
"build": "vite build",
|
|
58
|
-
"check": "npm outdated; npm-check-updates -i
|
|
58
|
+
"check:updates": "npm outdated; npm-check-updates -i",
|
|
59
|
+
"check:vulnerabilities": "npm run _check:owaspDependencyCheck; npm run _check:owaspBageInsertiions",
|
|
59
60
|
"document": "npm run _document:licenceReportJSON && npm run _document:licenceReportMarkdown && npm run _document:licenceReportCheck && npm run _document:insertLicensesIntoReadme && npm run _document:licenceTree && npm run _document:licenceTreeCheck",
|
|
60
61
|
"format": "prettier --write src/",
|
|
61
62
|
"lint": "eslint .",
|
|
@@ -69,9 +70,9 @@
|
|
|
69
70
|
"_check:owaspBageInsertiions": "node -e \"import('./dist/datapos-development.es.js').then(m => m.insertOWASPDependencyCheckBadgeIntoReadme())\"",
|
|
70
71
|
"_document:licenceReportJSON": "license-report --only=prod,peer --department.value=n/a --licensePeriod=n/a --material=n/a --relatedTo.value=n/a > licenses.json",
|
|
71
72
|
"_document:licenceReportMarkdown": "license-report --config license-report-config.json --only=prod,peer --output=markdown > licenses.md",
|
|
72
|
-
"_document:licenceReportCheck": "license-report-check --source ./licenses.json --allowed 'MIT' --
|
|
73
|
+
"_document:licenceReportCheck": "license-report-check --source ./licenses.json --allowed 'MIT' --output=table",
|
|
73
74
|
"_document:licenceTree": "license-report-recursive --only=prod,peer --department.value=n/a --licensePeriod=n/a --material=n/a --relatedTo.value=n/a --recurse --output=tree > licenseTree.json",
|
|
74
|
-
"_document:licenceTreeCheck": "license-report-check --source ./licenseTree.json --allowed 'MIT' --
|
|
75
|
+
"_document:licenceTreeCheck": "license-report-check --source ./licenseTree.json --allowed 'MIT' --output=table",
|
|
75
76
|
"_document:insertLicensesIntoReadme": "node -e \"import('./dist/datapos-development.es.js').then(m => m.insertLicensesIntoReadme())\"",
|
|
76
77
|
"_sync:withGitHub": "node -e \"import('./dist/datapos-development.es.js').then(m => m.syncWithGitHub())\"",
|
|
77
78
|
"_update:sharedDep": "npm install @datapos/datapos-shared@latest"
|