@datapos/datapos-development 0.3.132 → 0.3.134
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/dist/datapos-development.es.js +77 -78
- package/package.json +6 -5
package/README.md
CHANGED
@@ -1,7 +1,7 @@
 # Data Positioning Development Library
 
 [](https://sonarcloud.io/summary/new_code?id=data-positioning_datapos-development)
-<span><!-- OWASP_BADGE_START -->[](https://data-positioning.github.io/test-lib/dependency-check-reports/dependency-check-report.html)<!-- OWASP_BADGE_END --></span>
 [](https://www.npmjs.com/package/@datapos/datapos-development)
 [](./LICENSE)
 
@@ -62,7 +62,7 @@ The OWASP Dependency Check Report identifies known vulnerabilities in project dependencies.
 
 ### Dependency Licenses
 
-The following table lists top-level production and peer dependencies
+The following table lists top-level production and peer dependencies. All these dependencies (including transitive ones) have been recursively verified to use Apache-2.0, CC0-1.0, or MIT (commercially friendly licenses with minimal restrictions). Developers cloning this repository should independently verify dev and optional dependencies; users of the published library are covered by these checks.
 
 <!-- DEPENDENCY_LICENSES_START -->
 
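The new README wording describes a recursive license verification, but the tooling that performs it is not part of this diff. A minimal sketch of that kind of check, assuming a conventional `node_modules` layout and the allowlist named in the sentence above:

```js
import { promises as fs } from "node:fs";
import path from "node:path";

// Allowlist taken from the README sentence above; anything else is flagged.
const ALLOWED = new Set(["Apache-2.0", "CC0-1.0", "MIT"]);

async function checkLicenses(dir) {
    let entries;
    try {
        entries = await fs.readdir(dir, { withFileTypes: true });
    } catch {
        return; // No node_modules at this level.
    }
    for (const entry of entries) {
        if (!entry.isDirectory()) continue;
        const packageDir = path.join(dir, entry.name);
        if (entry.name.startsWith("@")) {
            await checkLicenses(packageDir); // Scoped packages nest one level deeper.
            continue;
        }
        try {
            const manifest = JSON.parse(await fs.readFile(path.join(packageDir, "package.json"), "utf8"));
            if (manifest.license != null && !ALLOWED.has(manifest.license)) console.warn(`Disallowed license: ${manifest.name} (${manifest.license})`);
        } catch {
            // Not a package directory; ignore.
        }
        await checkLicenses(path.join(packageDir, "node_modules")); // Cover transitive dependencies.
    }
}

await checkLicenses("node_modules");
```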
package/dist/datapos-development.es.js
CHANGED
@@ -1,63 +1,63 @@
 import { exec as b } from "node:child_process";
-import { promises as
+import { promises as n } from "node:fs";
 import { nanoid as $ } from "nanoid";
 import { promisify as O } from "node:util";
 const S = ["createObject", "dropObject", "removeRecords", "upsertRecords"], v = ["findObject", "getRecord", "listNodes", "previewObject", "retrieveRecords"], y = O(b);
 async function j() {
 try {
 console.info("🚀 Building configuration...");
-const e = JSON.parse(await
-e.name != null && (o.id = e.name.replace("@datapos/", "").replace("@data-positioning/", "")), e.version != null && (o.version = e.version), await
+const e = JSON.parse(await n.readFile("package.json", "utf8")), o = JSON.parse(await n.readFile("config.json", "utf8"));
+e.name != null && (o.id = e.name.replace("@datapos/", "").replace("@data-positioning/", "")), e.version != null && (o.version = e.version), await n.writeFile("config.json", JSON.stringify(o, void 0, 4), "utf8"), console.info("✅ Configuration built.");
 } catch (e) {
 console.error("❌ Error building configuration.", e);
 }
 }
-async function
+async function J(e) {
 try {
 console.info(`🚀 Building public directory index for identifier '${e}'...`);
 const o = {};
 async function i(s, r) {
 console.info(`⚙️ Processing directory '${s}'...`);
-const
-o[c] =
+const l = [], c = s.substring(`public/${e}`.length);
+o[c] = l;
 for (const a of r) {
-const
+const d = `${s}/${a}`;
 try {
-const f = await
+const f = await n.stat(d);
 if (f.isDirectory()) {
-const u = await
-
+const u = await n.readdir(d), p = { childCount: u.length, name: `${a}`, typeId: "folder" };
+l.push(p), await i(d, u);
 } else {
 const u = { id: $(), lastModifiedAt: f.mtimeMs, name: a, size: f.size, typeId: "object" };
-
+l.push(u);
 }
 } catch (f) {
 throw new Error(`Unable to get information for '${a}' in 'buildPublicDirectoryIndex'. ${String(f)}`);
 }
 }
-
-const f = a.typeId.localeCompare(
-return f === 0 ? a.name.localeCompare(
+l.sort((a, d) => {
+const f = a.typeId.localeCompare(d.typeId);
+return f === 0 ? a.name.localeCompare(d.name) : f;
 });
 }
-const
-await i(`public/${e}`,
+const t = await n.readdir(`public/${e}`);
+await i(`public/${e}`, t), await n.writeFile(`./public/${e}Index.json`, JSON.stringify(o), "utf8"), console.info("✅ Public directory index built.");
 } catch (o) {
 console.error("❌ Error building public directory index.", o);
 }
 }
-async function
+async function k() {
 try {
 console.info("🚀 Building connector configuration...");
-const e = JSON.parse(await
-let
-const r = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm,
-const
-return
+const e = JSON.parse(await n.readFile("package.json", "utf8")), o = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8");
+let t = !1, s = !1;
+const r = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, l = [...i.matchAll(r)].filter((a) => a[1] == null && a[2] !== "constructor").map((a) => {
+const d = a[2];
+return t = t || S.includes(d), s = s || v.includes(d), d;
 });
-
-const c = s &&
-c && console.info(`ℹ️ Supports ${c} usage.`), e.name != null && (o.id = e.name), o.operations =
+l.length > 0 ? console.info(`ℹ️ Implements ${l.length} operations.`) : console.warn("⚠️ Implements no operations.");
+const c = s && t ? "bidirectional" : s ? "source" : t ? "destination" : "unknown";
+c && console.info(`ℹ️ Supports ${c} usage.`), e.name != null && (o.id = e.name), o.operations = l, o.usageId = c, e.version != null && (o.version = e.version), await n.writeFile("config.json", JSON.stringify(o, void 0, 4), "utf8"), console.info("✅ Connector configuration built.");
 } catch (e) {
 console.error("❌ Error building connector configuration.", e);
 }
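The bulk of this hunk renames the node:fs promises import (previously `t`, now `n`) and rewrites `J`, the public-directory indexer, plus `k`, the connector-config builder. Since most of the removed lines are truncated in this diff view, here is a de-minified sketch of the new `J` (exported below as `buildPublicDirectoryIndex`); the descriptive names are guesses, error handling is omitted, and the logic mirrors the + lines above:

```js
import { promises as fs } from "node:fs";
import { nanoid } from "nanoid";

async function buildPublicDirectoryIndex(identifier) {
    const index = {}; // Maps each relative directory path to its entry list.
    async function processDirectory(directoryPath, entryNames) {
        const entries = [];
        index[directoryPath.substring(`public/${identifier}`.length)] = entries;
        for (const name of entryNames) {
            const entryPath = `${directoryPath}/${name}`;
            const stats = await fs.stat(entryPath);
            if (stats.isDirectory()) {
                const childNames = await fs.readdir(entryPath);
                entries.push({ childCount: childNames.length, name, typeId: "folder" });
                await processDirectory(entryPath, childNames); // Recurse into subfolders.
            } else {
                entries.push({ id: nanoid(), lastModifiedAt: stats.mtimeMs, name, size: stats.size, typeId: "object" });
            }
        }
        // Folders sort ahead of objects ("folder" < "object"); ties fall back to name order.
        entries.sort((a, b) => a.typeId.localeCompare(b.typeId) || a.name.localeCompare(b.name));
    }
    const rootNames = await fs.readdir(`public/${identifier}`);
    await processDirectory(`public/${identifier}`, rootNames);
    await fs.writeFile(`./public/${identifier}Index.json`, JSON.stringify(index), "utf8");
}

await buildPublicDirectoryIndex("sampleData"); // "sampleData" is an assumed identifier.
```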
@@ -65,8 +65,8 @@ async function J() {
 async function F() {
 try {
 console.info("🚀 Building context configuration...");
-const e = JSON.parse(await
-e.name != null && (o.id = e.name), o.operations = s, e.version != null && (o.version = e.version), await
+const e = JSON.parse(await n.readFile("package.json", "utf8")), o = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8"), t = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, s = [...i.matchAll(t)].filter((r) => r[1] == null && r[2] !== "constructor").map((r) => r[2]);
+e.name != null && (o.id = e.name), o.operations = s, e.version != null && (o.version = e.version), await n.writeFile("config.json", JSON.stringify(o, void 0, 4), "utf8"), console.info("✅ Context configuration built.");
 } catch (e) {
 console.error("❌ Error building context configuration.", e);
 }
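`k`, `F`, and `R` (this hunk and the ones around it) all scan src/index.ts with the same regex: a method name indented exactly four spaces, optionally `async`, with a `private` modifier captured in group 1 so it can be excluded. Run against an invented connector class, it behaves like this:

```js
const pattern = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm;

// Invented TypeScript-style source; the real input is the module's src/index.ts.
const source = `
export class SampleConnector {
    constructor(config) {}
    async listNodes(path) {}
    private resolveToken() {}
    async upsertRecords(records) {}
}`;

// Keep matches that are not private (group 1 empty) and not the constructor.
const operations = [...source.matchAll(pattern)]
    .filter((match) => match[1] == null && match[2] !== "constructor")
    .map((match) => match[2]);

console.log(operations); // ["listNodes", "upsertRecords"]
```

In `k`, `listNodes` sits in the read-operation list `v` and `upsertRecords` in the write-operation list `S`, so this sample would be classified as bidirectional.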
@@ -74,8 +74,8 @@ async function F() {
 async function R() {
 try {
 console.info("🚀 Building presenter configuration...");
-const e = JSON.parse(await
-e.name != null && (o.id = e.name), o.operations = s, e.version != null && (o.version = e.version), await
+const e = JSON.parse(await n.readFile("package.json", "utf8")), o = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8"), t = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, s = [...i.matchAll(t)].filter((r) => !r[1] && r[2] !== "constructor").map((r) => r[2]);
+e.name != null && (o.id = e.name), o.operations = s, e.version != null && (o.version = e.version), await n.writeFile("config.json", JSON.stringify(o, void 0, 4), "utf8"), console.info("✅ Presenter configuration built.");
 } catch (e) {
 console.error("❌ Error building context configuration.", e);
 }
@@ -83,12 +83,12 @@ async function R() {
 async function A(e = "./") {
 try {
 console.info("🚀 Bumping version...");
-const o = JSON.parse(await
+const o = JSON.parse(await n.readFile(`${e}package.json`, "utf8"));
 if (o.version == null)
-o.version = "0.0.001", await
+o.version = "0.0.001", await n.writeFile(`${e}package.json`, JSON.stringify(o, void 0, 4), "utf8"), console.warn(`⚠️ Version initialised to ${o.version}.`);
 else {
-const i = o.version,
-o.version = `${
+const i = o.version, t = o.version.split(".");
+o.version = `${t[0]}.${t[1]}.${Number(t[2]) + 1}`, await n.writeFile(`${e}package.json`, JSON.stringify(o, void 0, 4), "utf8"), console.info(`✅ Version bumped from ${i} to ${o.version}.`);
 }
 } catch (o) {
 console.error("❌ Error bumping package version.", o);
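The bump in `A` (`bumpVersion`) is a plain split on "." with the patch segment incremented; it does not parse prerelease or build metadata. The arithmetic, isolated:

```js
// Same logic as the + line above; "0.3.133" is an assumed input.
const previous = "0.3.133";
const [major, minor, patch] = previous.split(".");
const next = `${major}.${minor}.${Number(patch) + 1}`;
console.log(`Version bumped from ${previous} to ${next}.`); // 0.3.133 -> 0.3.134
```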
@@ -100,10 +100,12 @@ function D(e) {
 async function P() {
 const e = "<!-- DEPENDENCY_LICENSES_START -->", o = "<!-- DEPENDENCY_LICENSES_END -->";
 try {
-const i = (await
+const i = (await n.readFile("./licenses.md", "utf8")).trim(), t = await n.readFile("./README.md", "utf8"), s = t.indexOf(e), r = t.indexOf(o);
 (s === -1 || r === -1) && (console.error("❌ Dependency license markers not found in readme file."), process.exit(1));
-const
-
+const l = t.substring(0, s + e.length) + `
+` + i + `
+` + t.substring(r);
+await n.writeFile("README.md", l, "utf8"), console.log("✅ Readme file updated with license information");
 } catch (i) {
 console.error("❌ Error updating readme file.", i), process.exit(1);
 }
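`P` (`insertLicensesIntoReadme`) now writes the result back, splicing licenses.md between the two HTML-comment markers. The splice, shown on a toy string that shares only the markers with the real file:

```js
const START = "<!-- DEPENDENCY_LICENSES_START -->";
const END = "<!-- DEPENDENCY_LICENSES_END -->";

// Invented README content; only the markers match the real file.
const readme = `# Demo\n\n${START}\nstale table\n${END}\n`;
const licenses = "| Package | License |";

const startAt = readme.indexOf(START);
const endAt = readme.indexOf(END);
// Keep everything up to and including the start marker, drop the old body,
// and resume at the end marker so the markers survive for the next run.
const updated = readme.substring(0, startAt + START.length) + "\n" + licenses + "\n" + readme.substring(endAt);

console.log(updated.includes("stale table")); // false: the old content between markers is replaced
```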
@@ -111,12 +113,12 @@ async function P() {
 async function I() {
 const e = "<!-- OWASP_BADGE_START -->", o = "<!-- OWASP_BADGE_END -->";
 try {
-const i = JSON.parse(await
+const i = JSON.parse(await n.readFile("./dependency-check-reports/dependency-check-report.json", "utf-8")), t = { critical: 0, high: 0, moderate: 0, low: 0, unknown: 0 };
 for (const g of i.dependencies)
 if (g.vulnerabilities != null)
 for (const m of g.vulnerabilities) {
 const w = m.severity?.toLowerCase() ?? "unknown";
-w in
+w in t ? t[w]++ : t.unknown++;
 }
 const s = {
 critical: { color: "D32F2F", label: "critical" },
@@ -124,23 +126,20 @@ async function I() {
 moderate: { color: "FBC02D", label: "moderate" },
 low: { color: "6D8C31", label: "low" },
 unknown: { color: "616161", label: "unknown" }
-}, r =
-
-
-);
-const d = JSON.parse(await t.readFile("config.json", "utf8")), c = [];
-if (r === 0)
-c.push(`[](https://data-positioning.github.io/${d.id}/dependency-check-reports/dependency-check-report.html)`);
+}, r = JSON.parse(await n.readFile("config.json", "utf8")), l = [];
+if (Object.values(t).reduce((g, m) => g + m, 0) === 0)
+console.info("✅ No vulnerabilities found."), l.push(`[](https://data-positioning.github.io/${r.id}/dependency-check-reports/dependency-check-report.html)`);
 else
-for (const [g, m] of Object.entries(
-
-
-
+for (const [g, m] of Object.entries(t)) {
+const w = s[g];
+if (console.warn(`⚠️ ${m} ${w.label} vulnerability(ies) found.`), m === 0) continue;
+const h = `https://img.shields.io/badge/OWASP-${m}%20${w.label}-${w.color}`;
+l.push(`[](https://data-positioning.github.io/${r.id}/dependency-check-reports/dependency-check-report.html)`);
 }
-const a = await
-(
-const u =
-await
+const a = await n.readFile("./README.md", "utf8"), d = a.indexOf(e), f = a.indexOf(o);
+(d === -1 || f === -1) && (console.error("❌ OWASP badge markers not found in README.md."), process.exit(1));
+const u = l.join(" "), p = a.substring(0, d + e.length) + u + a.substring(f);
+await n.writeFile("README.md", p, "utf8"), console.info("✅ OWASP dependency check badge(s) inserted into README.md");
 } catch (i) {
 console.error("❌ Error updating README with OWASP badges:", i), process.exit(1);
 }
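`I` (the OWASP badge updater) now derives the zero-vulnerability case from the severity tally itself rather than a separate counter, and builds one shields.io badge URL per non-zero severity. A sketch of that path; the report fragment and the high-severity colour are invented (the real colour map sits in the unchanged lines between the two hunks):

```js
// Invented fragment in the OWASP dependency-check JSON shape read above.
const report = { dependencies: [{ vulnerabilities: [{ severity: "HIGH" }, { severity: "High" }, {}] }] };

// Tally severities, case-insensitively; anything unrecognised counts as unknown.
const totals = { critical: 0, high: 0, moderate: 0, low: 0, unknown: 0 };
for (const dependency of report.dependencies)
    for (const vulnerability of dependency.vulnerabilities ?? []) {
        const severity = vulnerability.severity?.toLowerCase() ?? "unknown";
        severity in totals ? totals[severity]++ : totals.unknown++;
    }

const styles = { high: { color: "F57C00", label: "high" } }; // Assumed colour; not from the diff.
for (const [severity, count] of Object.entries(totals)) {
    if (count === 0 || styles[severity] == null) continue; // Only high is styled in this sketch.
    console.log(`https://img.shields.io/badge/OWASP-${count}%20${styles[severity].label}-${styles[severity].color}`);
}
```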
@@ -148,7 +147,7 @@ async function I() {
 async function M() {
 try {
 console.info("🚀 Sending deployment notice...");
-const e = JSON.parse(await
+const e = JSON.parse(await n.readFile("config.json", "utf8")), o = {
 body: JSON.stringify(e),
 headers: { "Content-Type": "application/json" },
 method: "PUT"
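`M` (sendDeploymentNotice) and `W` (`uploadModuleConfigToDO`, further down) both push config.json with a bare fetch PUT. A sketch of the shape, using the endpoint visible in `W`'s hunk below and assuming a config.json with an `id` field:

```js
import { promises as fs } from "node:fs";

const config = JSON.parse(await fs.readFile("config.json", "utf8")); // e.g. { id: "...", version: "..." }
const response = await fetch(`https://api.datapos.app/states/${config.id}`, {
    body: JSON.stringify(config),
    headers: { "Content-Type": "application/json" },
    method: "PUT"
});
// Surface the server's error body rather than a bare status code.
if (!response.ok) throw new Error(await response.text());
```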
@@ -162,7 +161,7 @@ async function M() {
 async function T() {
 try {
 console.info("🚀 Synchronising with GitHub....");
-const e = JSON.parse(await
+const e = JSON.parse(await n.readFile("package.json", "utf8"));
 await y("git add ."), await y(`git commit -m "v${e.version}"`), await y("git push origin main:main"), console.info(`✅ Synchronised version ${e.version} with GitHub.`);
 } catch (e) {
 console.error("❌ Error synchronising with GitHub.", e);
@@ -171,12 +170,12 @@ async function T() {
 async function _(e, o) {
 try {
 console.info("🚀 Uploading directory to R2....");
-async function i(s, r,
-for (const c of
-const a = `${s}/${c}`,
-if ((await
-const u = await
-await i(a,
+async function i(s, r, l) {
+for (const c of l) {
+const a = `${s}/${c}`, d = `${r}/${c}`;
+if ((await n.stat(a)).isDirectory()) {
+const u = await n.readdir(a);
+await i(a, d, u);
 } else {
 console.info(`⬆️ Uploading '${s}/${c}'...`);
 const u = `wrangler r2 object put "datapos-sample-data-eu/${r}/${c}" --file="${s}/${c}" --jurisdiction=eu --remote`, p = await y(u);
@@ -184,8 +183,8 @@ async function _(e, o) {
 }
 }
 }
-const
-await i(`${e}/${o}`, o,
+const t = await n.readdir(`${e}/${o}/`);
+await i(`${e}/${o}`, o, t), console.info("✅ Directory uploaded to R2.");
 } catch (i) {
 console.error("❌ Error uploading directory to R2.", i);
 }
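Every shell call in this module goes through `y = promisify(exec)` from the first hunk. The R2 uploads reduce to the pattern below; the flags are copied from the command string in `_` above, while the object path is an invented example:

```js
import { exec } from "node:child_process";
import { promisify } from "node:util";

const run = promisify(exec); // Same construction as y = O(b) in the first hunk.

// Flags copied from the diff; "demo/file.txt" is an assumed object path.
const command = 'wrangler r2 object put "datapos-sample-data-eu/demo/file.txt" --file="public/demo/file.txt" --jurisdiction=eu --remote';
const { stderr } = await run(command);
if (stderr) throw new Error(stderr); // B below treats any stderr output as failure.
```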
@@ -193,44 +192,44 @@ async function _(e, o) {
 async function W() {
 try {
 console.info("🚀 Uploading module configuration....");
-const e = JSON.parse(await
+const e = JSON.parse(await n.readFile("config.json", "utf8")), o = e.id, i = {
 body: JSON.stringify(e),
 headers: { "Content-Type": "application/json" },
 method: "PUT"
-},
-if (!
+}, t = await fetch(`https://api.datapos.app/states/${o}`, i);
+if (!t.ok) throw new Error(await t.text());
 console.info("✅ Module configuration uploaded.");
 } catch (e) {
 console.error("❌ Error uploading module configuration.", e);
 }
 }
-async function
+async function B(e) {
 try {
 console.info("🚀 Uploading module to R2...");
-const i = `v${JSON.parse(await
-async function
-const
-for (const c of
-const a = `${s}/${c.name}`,
+const i = `v${JSON.parse(await n.readFile("package.json", "utf8")).version}`;
+async function t(s, r = "") {
+const l = await n.readdir(s, { withFileTypes: !0 });
+for (const c of l) {
+const a = `${s}/${c.name}`, d = r ? `${r}/${c.name}` : c.name;
 if (!c.isDirectory()) {
-const f = `${e}_${i}/${
-console.info(`⬆️ Uploading '${
+const f = `${e}_${i}/${d}`.replace(/\\/g, "/"), u = c.name.endsWith(".js") ? "application/javascript" : c.name.endsWith(".css") ? "text/css" : "application/octet-stream";
+console.info(`⬆️ Uploading '${d}' → '${f}'...`);
 const { stderr: p } = await y(`wrangler r2 object put "${f}" --file="${a}" --content-type ${u} --jurisdiction=eu --remote`);
 if (p) throw new Error(p);
 }
 }
 }
-await
+await t("dist"), console.info("✅ Module uploaded to R2.");
 } catch (o) {
 console.error("❌ Error uploading module to R2.", o);
 }
 }
 export {
 j as buildConfig,
-
+k as buildConnectorConfig,
 F as buildContextConfig,
 R as buildPresenterConfig,
-
+J as buildPublicDirectoryIndex,
 A as bumpVersion,
 D as echoScriptNotImplemented,
 P as insertLicensesIntoReadme,
@@ -239,5 +238,5 @@ export {
 T as syncWithGitHub,
 _ as uploadDirectoryToR2,
 W as uploadModuleConfigToDO,
-
+B as uploadModuleToR2
 };
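One more detail from `B` (`uploadModuleToR2`) above: the upload now passes explicit content types to wrangler. The mapping from the + line, pulled out as a hypothetical helper:

```js
// Mapping copied from the diff; the helper name is invented.
function contentTypeFor(fileName) {
    if (fileName.endsWith(".js")) return "application/javascript";
    if (fileName.endsWith(".css")) return "text/css";
    return "application/octet-stream"; // Everything else is treated as binary.
}

console.log(contentTypeFor("datapos-development.es.js")); // application/javascript
console.log(contentTypeFor("styles.css")); // text/css
console.log(contentTypeFor("README.md")); // application/octet-stream
```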
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "@datapos/datapos-development",
-"version": "0.3.132",
+"version": "0.3.134",
 "description": "A library of utilities for managing the Data Positioning repositories.",
 "license": "MIT",
 "author": "Jonathan Terrell <terrell.jm@gmail.com>",
@@ -26,7 +26,8 @@
 "dist"
 ],
 "dependencies": {
-"@datapos/datapos-shared": "^0.3.252"
+"@datapos/datapos-shared": "^0.3.252",
+"node-pty": "^1.0.0"
 },
 "devDependencies": {
 "@types/node": "^24.10.1",
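node-pty is the only new production dependency, though nothing in the shipped dist file references it yet. For orientation, a minimal sketch using its documented spawn/onData API; the shell choice and terminal dimensions are arbitrary:

```js
import { spawn } from "node-pty";

// Spawns a pseudo-terminal running bash (use "powershell.exe" on Windows).
const shell = spawn("bash", [], { name: "xterm-color", cols: 80, rows: 24, cwd: process.cwd(), env: process.env });
shell.onData((data) => process.stdout.write(data)); // Echo terminal output.
shell.write("echo hello\r");
```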
@@ -41,8 +42,7 @@
 "nanoid": "^5.1.6",
 "npm-check-updates": "^19.1.2",
 "owasp-dependency-check": "^1.0.0",
-"prettier": "^3.
-"retire": "^5.3.0",
+"prettier": "^3.7.1",
 "rollup-plugin-visualizer": "^6.0.5",
 "ts-to-zod": "^5.1.0",
 "type-fest": "^5.2.0",
@@ -55,7 +55,8 @@
 "scripts": {
 "audit": "npm audit",
 "build": "vite build",
-"check": "npm outdated; npm-check-updates -i
+"check:updates": "npm outdated; npm-check-updates -i",
+"check:vulnerabilities": "npm run _check:owaspDependencyCheck; npm run _check:owaspBageInsertiions",
 "document": "npm run _document:licenceReportJSON && npm run _document:licenceReportMarkdown && npm run _document:licenceReportCheck && npm run _document:insertLicensesIntoReadme && npm run _document:licenceTree && npm run _document:licenceTreeCheck",
 "format": "prettier --write src/",
 "lint": "eslint .",