@datapos/datapos-development 0.3.38 → 0.3.40
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/datapos-development.es.js +65 -0
- package/dist/types/src/index.d.ts +8 -0
- package/dist/types/vite.config.d.ts +5 -0
- package/package.json +31 -11
- package/.prettierrc.json +0 -23
- package/.vscode/settings.json +0 -7
- package/LICENSES.json +0 -1
- package/eslint.config.js +0 -15
- package/index.js +0 -186

package/dist/datapos-development.es.js
ADDED
@@ -0,0 +1,65 @@
+import { promises as n } from "fs";
+const f = ["createObject", "dropObject", "removeRecords", "upsertRecords"], u = ["findObject", "getRecord", "listNodes", "previewObject", "retrieveRecords"];
+async function p() {
+  try {
+    console.log("Building connector configuration...");
+    const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8")), t = await n.readFile("src/index.ts", "utf8");
+    let r = !1, s = !1;
+    const i = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, c = [...t.matchAll(i)].filter((a) => !a[1] && a[2] !== "constructor").map((a) => {
+      const g = a[2];
+      return r = r || f.includes(g), s = s || u.includes(g), g;
+    });
+    c.length > 0 ? console.log(`ℹ️ Implements ${c.length} operations.`) : console.log("⚠️ Implements no operations.");
+    const l = s && r ? "bidirectional" : s ? "source" : r ? "destination" : null;
+    console.log(l ? `ℹ️ Supports ${l} usage.` : "⚠️ No usage identified."), o.name && (e.id = o.name), e.operations = c, e.usageId = l, o.version && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.log("✅ Connector configuration built.");
+  } catch (o) {
+    console.warn("❌ Error building connector configuration.", o);
+  }
+}
+async function v() {
+  try {
+    console.log("Building context configuration...");
+    const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8")), t = await n.readFile("src/index.ts", "utf8"), r = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, s = [...t.matchAll(r)].filter((i) => !i[1] && i[2] !== "constructor").map((i) => i[2]);
+    o.name && (e.id = o.name), e.operations = s, o.version && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8");
+  } catch (o) {
+    console.warn("❌ Error building context configuration.", o);
+  }
+}
+async function w() {
+  try {
+    console.log("Building informer configuration...");
+    const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8")), t = await n.readFile("src/index.ts", "utf8"), r = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, s = [...t.matchAll(r)].filter((i) => !i[1] && i[2] !== "constructor").map((i) => i[2]);
+    o.name && (e.id = o.name), e.operations = s, o.version && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8");
+  } catch (o) {
+    console.warn("❌ Error building informer configuration.", o);
+  }
+}
+async function m() {
+  try {
+    console.log("Building presenter configuration...");
+    const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8")), t = await n.readFile("src/index.ts", "utf8"), r = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, s = [...t.matchAll(r)].filter((i) => !i[1] && i[2] !== "constructor").map((i) => i[2]);
+    o.name && (e.id = o.name), e.operations = s, o.version && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8");
+  } catch (o) {
+    console.warn("❌ Error building context configuration.", o);
+  }
+}
+async function O() {
+  try {
+    console.log("Bumping version...");
+    const o = JSON.parse(await n.readFile("package.json", "utf8"));
+    if (o.version) {
+      const e = o.version, t = o.version.split(".");
+      o.version = `${t[0]}.${t[1]}.${Number(t[2]) + 1}`, await n.writeFile("package.json", JSON.stringify(o, void 0, 4), "utf8"), console.log(`✅ Version bumped from ${e} to ${o.version}.`);
+    } else
+      o.version = "0.0.001", await n.writeFile("package.json", JSON.stringify(o, void 0, 4), "utf8"), console.log(`⚠️ Version initialised to ${o.version}.`);
+  } catch (o) {
+    console.warn("❌ Error bumping package version.", o);
+  }
+}
+export {
+  p as buildConnectorConfig,
+  v as buildContextConfig,
+  w as buildInformerConfig,
+  m as buildPresenterConfig,
+  O as bumpVersion
+};
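
In buildConnectorConfig (minified as p above), a connector's usageId is derived by matching the public method names found in src/index.ts against the two lists at the top of the bundle: write operations (createObject, dropObject, removeRecords, upsertRecords) imply destination usage, read operations (findObject, getRecord, listNodes, previewObject, retrieveRecords) imply source usage, and a mix implies bidirectional. The TypeScript sketch below reproduces that detection against a hypothetical connector class; the class name and its methods are illustrative only:

// Sketch of the operation detection in buildConnectorConfig, applied to a
// hypothetical connector source file (the class and method names are illustrative).
const WRITE_OPERATIONS = ['createObject', 'dropObject', 'removeRecords', 'upsertRecords'];
const READ_OPERATIONS = ['findObject', 'getRecord', 'listNodes', 'previewObject', 'retrieveRecords'];

const source = `
class SampleConnector {
    constructor(config) {}
    async listNodes(parentId) {}
    async retrieveRecords(query) {}
    private buildHeaders() {}
}
`;

// Same pattern as the bundle: a member declared at four spaces of indentation,
// optionally 'async'; group 1 captures a 'private' modifier, group 2 the method name.
const pattern = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm;
const operations = [...source.matchAll(pattern)]
    .filter((match) => !match[1] && match[2] !== 'constructor')
    .map((match) => match[2]);

const writes = operations.some((name) => WRITE_OPERATIONS.includes(name));
const reads = operations.some((name) => READ_OPERATIONS.includes(name));
const usageId = writes && reads ? 'bidirectional' : reads ? 'source' : writes ? 'destination' : null;

console.log(operations); // ['listNodes', 'retrieveRecords']
console.log(usageId);    // 'source'
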
package/dist/types/src/index.d.ts
ADDED
@@ -0,0 +1,8 @@
+/**
+ * Development utilities.
+ */
+export declare function buildConnectorConfig(): Promise<void>;
+export declare function buildContextConfig(): Promise<void>;
+export declare function buildInformerConfig(): Promise<void>;
+export declare function buildPresenterConfig(): Promise<void>;
+export declare function bumpVersion(): Promise<void>;
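
The declaration file types each helper as a Promise-returning function with no arguments. A consumer package would presumably invoke them from its own build scripts through the ES module entry; the script below is a hypothetical example of such a call site, not something this diff ships:

// Hypothetical consumer build script; assumes an ES module project with
// '@datapos/datapos-development' installed as a dev dependency.
import { buildConnectorConfig, bumpVersion } from '@datapos/datapos-development';

await bumpVersion();          // patch-increments the consumer's package.json version
await buildConnectorConfig(); // regenerates config.json from package.json and src/index.ts
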
package/package.json
CHANGED
@@ -3,30 +3,50 @@
     "license": "MIT",
     "private": false,
     "type": "module",
-    "
-    "
+    "version": "0.3.40",
+    "files": [
+        "dist"
+    ],
+    "module": "./dist/datapos-development.es.js",
+    "exports": {
+        ".": {
+            "import": "./dist/datapos-development.es.js",
+            "types": "./dist/types/src/index.d.ts"
+        }
+    },
+    "types": "./dist/types/src/index.d.ts",
     "devDependencies": {
-        "
-        "
+        "@datapos/datapos-shared": "^0.3.139",
+        "@types/node": "^24.10.0",
+        "@typescript-eslint/eslint-plugin": "^8.46.3",
+        "@typescript-eslint/parser": "^8.46.3",
+        "eslint": "^9.39.1",
+        "eslint-plugin-import": "^2.32.0",
+        "jiti": "^2.6.1",
+        "license-report": "^6.8.1",
         "license-report-check": "^0.1.2",
-        "nanoid": "^5.1.
-        "npm-check-updates": "^
+        "nanoid": "^5.1.6",
+        "npm-check-updates": "^19.1.2",
         "prettier": "^3.6.2",
         "retire": "^5.3.0",
-        "run": "^1.5.0"
+        "run": "^1.5.0",
+        "type-fest": "^5.2.0",
+        "typescript": "^5.9.3",
+        "vite": "^7.2.2",
+        "vite-plugin-dts": "^4.5.4"
     },
     "scripts": {
         "audit": "npm audit",
-        "build": "
+        "build": "vite build",
         "bumpVersion": "node -e \"import('./index.js').then(m => m.bumpVersion())\"",
         "check": "npm outdated; npm-check-updates -i && retire",
         "document": "license-report --only=prod,peer > LICENSES.json && license-report-check --source ./LICENSES.json --allowed 'MIT' --allowed 'n/a' --allowed 'Apache-2.0' --output=table",
-        "format": "prettier --write *.
-        "lint": "eslint
+        "format": "prettier --write *.ts",
+        "lint": "eslint src/",
         "publishToNPM": "npm publish --access public",
         "release": "npm run syncWithGitHub && npm run publishToNPM",
         "syncWithGitHub": "npm run bumpVersion && node -e \"import('./index.js').then(m => m.syncWithGitHub())\"",
         "test": "echo \"***** TEST SCRIPT NOT IMPLEMENTED. *****\"",
         "updateDependencies": "echo \"***** UPDATE DEPENDENCIES SCRIPT NOT IMPLEMENTED. *****\""
     }
-}
+}
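
The build script now runs vite build, vite and vite-plugin-dts join the devDependencies, and the published types include dist/types/vite.config.d.ts, which points to a Vite library-mode build. The actual vite.config.ts is not part of this diff, so the sketch below is only a plausible configuration for the published layout; the entry path, the externalised fs import and the plugin options are assumptions:

// Hypothetical vite.config.ts (not included in this diff). A minimal library-mode
// configuration that could produce dist/datapos-development.es.js plus the
// declaration files under dist/types/.
import { defineConfig } from 'vite';
import dts from 'vite-plugin-dts';

export default defineConfig({
    build: {
        lib: {
            entry: 'src/index.ts',
            fileName: () => 'datapos-development.es.js',
            formats: ['es']
        },
        rollupOptions: { external: ['fs'] } // the bundle keeps the Node 'fs' import external
    },
    plugins: [dts({ outDir: 'dist/types' })]
});
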
package/.prettierrc.json
DELETED
@@ -1,23 +0,0 @@
-{
-    "$schema": "https://json.schemastore.org/prettierrc",
-    "arrowParens": "always",
-    "bracketSameLine": false,
-    "bracketSpacing": true,
-    "cursorOffset": -1,
-    "embeddedLanguageFormatting": "auto",
-    "endOfLine": "lf",
-    "htmlWhitespaceSensitivity": "css",
-    "insertPragma": false,
-    "jsxSingleQuote": false,
-    "plugins": [],
-    "printWidth": 180,
-    "proseWrap": "preserve",
-    "quoteProps": "as-needed",
-    "semi": true,
-    "singleAttributePerLine": false,
-    "singleQuote": true,
-    "tabWidth": 4,
-    "trailingComma": "none",
-    "useTabs": false,
-    "vueIndentScriptAndStyle": false
-}

package/.vscode/settings.json
DELETED
package/LICENSES.json
DELETED
@@ -1 +0,0 @@
-[]

package/eslint.config.js
DELETED
package/index.js
DELETED
@@ -1,186 +0,0 @@
-// Dependencies
-const fs = require('fs').promises;
-const { nanoid } = require('nanoid');
-
-// Dependencies - Promisify exec.
-const util = require('util');
-const exec = util.promisify(require('child_process').exec);
-
-// Operations - Build configuration.
-async function buildConfig(directoryPath) {
-    const configJSON = await readJSONFile(`${directoryPath || ''}config.json`);
-    const packageJSON = await readJSONFile('package.json');
-    await fs.writeFile(`${directoryPath || ''}config.json`, JSON.stringify({ ...configJSON, id: packageJSON.name, version: packageJSON.version }, undefined, 4));
-}
-
-// Operations - Document interface.
-async function documentInterface(moduleTypeId) {
-    const configJSON = await readJSONFile('config.json');
-    const indexCode = await fs.readFile('src/index.ts', 'utf8');
-    const regex = /^\s{4}(?:async\s+)?(?:private\s+|public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm;
-    const matches = [...indexCode.matchAll(regex)]
-        .map((m) => m[1])
-        .filter((name) => name !== 'constructor')
-        .filter((_, index, arr) => {
-            const match = [...indexCode.matchAll(regex)][arr.indexOf(_)];
-            return !match[0].includes('private');
-        });
-    configJSON.interface = matches;
-    await fs.writeFile('config.json', JSON.stringify(configJSON, undefined, 4));
-}
-
-// Operations - Build public directory index.
-async function buildPublicDirectoryIndex(id) {
-    const index = {};
-
-    async function listDirectoryEntriesRecursively(directoryPath, names) {
-        const entries = [];
-        const localDirectoryPath = directoryPath.substring(`public/${id}`.length);
-        index[localDirectoryPath] = entries;
-        for (const name of names) {
-            const itemPath = `${directoryPath}/${name}`;
-            try {
-                const stats = await fs.stat(itemPath);
-                if (stats.isDirectory()) {
-                    const nextLevelChildren = await fs.readdir(itemPath);
-                    entries.push({ childCount: nextLevelChildren.length, name: `${name}`, typeId: 'folder' });
-                    await listDirectoryEntriesRecursively(itemPath, nextLevelChildren);
-                } else {
-                    entries.push({ id: nanoid(), lastModifiedAt: stats.mtimeMs, name, size: stats.size, typeId: 'object' });
-                }
-            } catch (error) {
-                console.error(`Unable to get information for '${name}' in 'buildPublicDirectoryIndex'.`, error);
-            }
-        }
-        // TODO: Prior version: entries.sort((left, right) => right.typeId.localeCompare(left.typeId) || left.name.localeCompare(right.name));
-        entries.sort((left, right) => {
-            const typeComparison = left.typeId.localeCompare(right.typeId);
-            return typeComparison !== 0 ? typeComparison : left.name.localeCompare(right.name);
-        });
-    }
-
-    const toplevelNames = await fs.readdir(`public/${id}`);
-    await listDirectoryEntriesRecursively(`public/${id}`, toplevelNames);
-    await fs.writeFile(`./public/${id}Index.json`, JSON.stringify(index), (error) => {
-        if (error) return console.error(error);
-    });
-}
-
-// Operations - Bump version.
-async function bumpVersion() {
-    const packageJSON = await readJSONFile('package.json');
-    const versionSegments = packageJSON.version.split('.');
-    packageJSON.version = `${versionSegments[0]}.${versionSegments[1]}.${Number(versionSegments[2]) + 1}`;
-    await fs.writeFile('package.json', JSON.stringify(packageJSON, undefined, 4));
-    console.log(`Bumped to version ${packageJSON.version}.`);
-}
-
-// Operations - Clear directory.
-async function clearDirectory(directoryPath) {
-    for (const itemName of await fs.readdir(directoryPath)) {
-        const itemPath = `${directoryPath}/${itemName}`;
-        try {
-            const stats = await fs.stat(itemPath);
-            if (stats.isDirectory()) {
-                await fs.rm(itemPath, { recursive: true, force: true });
-            } else {
-                await fs.unlink(itemPath);
-            }
-        } catch (error) {
-            console.error(`Unable to get information for '${itemPath}' in 'clearDirectory'.`, error);
-        }
-    }
-}
-
-// Operations - Send deployment notice.
-async function sendDeploymentNotice() {
-    const configJSON = await readJSONFile('config.json');
-    const options = {
-        body: JSON.stringify(configJSON),
-        headers: { 'Content-Type': 'application/json' },
-        method: 'PUT'
-    };
-    const response = await fetch(`https://api.datapos.app/states/${configJSON.id}`, options);
-    if (!response.ok) console.log(await response.text());
-}
-
-// UtilitiesOperations - Sync with GitHub.
-async function syncWithGitHub() {
-    const packageJSON = await readJSONFile('package.json');
-    await exec('git add .');
-    await exec(`git commit -m v${packageJSON.version}`);
-    await exec('git push origin main:main');
-}
-
-// Operations - Upload directory to Cloudflare R2.
-async function uploadDirectoryToR2(sourceDirectory, uploadDirectory) {
-    async function listDirectoryEntriesRecursively(currentSourceDirectory, currentDestinationDirectory, names) {
-        for (const name of names) {
-            const sourceItemPath = `${currentSourceDirectory}/${name}`;
-            const destinationItemPath = `${currentDestinationDirectory}/${name}`;
-            try {
-                const stats = await fs.stat(sourceItemPath);
-                if (stats.isDirectory()) {
-                    const nextLevelChildren = await fs.readdir(sourceItemPath);
-                    await listDirectoryEntriesRecursively(sourceItemPath, destinationItemPath, nextLevelChildren);
-                } else {
-                    const command = `wrangler r2 object put "datapos-sample-data-eu/${currentDestinationDirectory}/${name}" --file="${currentSourceDirectory}/${name}" --jurisdiction=eu --remote`;
-                    const response = await exec(command);
-                    console.log('Uploading:', `${currentSourceDirectory}/${name}`);
-                    if (response.stderr) {
-                        console.log('Command___:', command);
-                        console.log('Error_____:', response.stderr);
-                    }
-                }
-            } catch (error) {
-                console.error(`Unable to get information for '${name}' in 'uploadDirectoryToR2'.`, error);
-            }
-        }
-    }
-    const toplevelNames = await fs.readdir(`${sourceDirectory}/${uploadDirectory}/`);
-    await listDirectoryEntriesRecursively(`${sourceDirectory}/${uploadDirectory}`, uploadDirectory, toplevelNames);
-}
-
-// Operations - Upload module configuration.
-async function uploadModuleConfig() {
-    const configJSON = await readJSONFile('config.json');
-    const stateId = configJSON.id;
-    const options = {
-        body: JSON.stringify(configJSON),
-        headers: { 'Content-Type': 'application/json' },
-        method: 'PUT'
-    };
-    const response = await fetch(`https://api.datapos.app/states/${stateId}`, options);
-    if (!response.ok) console.log(await response.text());
-}
-
-// Operations - Upload module to Cloudflare R2.
-async function uploadModuleToR2(fromPath, toPath) {
-    const packageJSON = await readJSONFile('package.json');
-    const toPathWithVersion = toPath.replace(/^(.*?\.)/, `$1v${packageJSON.version}.`);
-    await exec(`wrangler r2 object put ${toPathWithVersion} --file=dist/${fromPath} --content-type application/javascript --jurisdiction=eu --remote`, { stdio: 'inherit' });
-}
-
-// Utilities - Read JSON file.
-async function readJSONFile(path) {
-    try {
-        return JSON.parse(await fs.readFile(path, 'utf8'));
-    } catch (error) {
-        console.warn(`WARN: JSON file '${path}' not found or invalid.`, error);
-        return {};
-    }
-}
-
-// Exposures
-export {
-    buildConfig,
-    buildPublicDirectoryIndex,
-    bumpVersion,
-    clearDirectory,
-    documentInterface,
-    sendDeploymentNotice,
-    syncWithGitHub,
-    uploadDirectoryToR2,
-    uploadModuleConfig,
-    uploadModuleToR2
-};
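
The removed buildPublicDirectoryIndex sorted each directory's entries by typeId and then by name, so folders listed ahead of objects simply because 'folder' sorts before 'object'. A small TypeScript sketch of that comparator with hypothetical entries:

// Sketch of the entry ordering used by the removed buildPublicDirectoryIndex;
// the entry values are illustrative only.
type Entry = { name: string; typeId: 'folder' | 'object' };

const entries: Entry[] = [
    { name: 'b.csv', typeId: 'object' },
    { name: 'archive', typeId: 'folder' },
    { name: 'a.csv', typeId: 'object' }
];

entries.sort((left, right) => {
    const typeComparison = left.typeId.localeCompare(right.typeId);
    return typeComparison !== 0 ? typeComparison : left.name.localeCompare(right.name);
});

console.log(entries.map((entry) => entry.name)); // ['archive', 'a.csv', 'b.csv']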