@geode/opengeodeweb-back 5.10.0-rc.9 → 5.10.0
This diff shows the changes between publicly released versions of this package as they appear in the supported public registries, and is provided for informational purposes only.
- package/package.json +3 -4
- package/generate_schemas.js +0 -122
package/package.json
CHANGED
@@ -1,14 +1,14 @@
 {
   "name": "@geode/opengeodeweb-back",
-  "version": "5.10.0-rc.9",
+  "version": "5.10.0",
   "description": "",
   "scripts": {
-    "json": "node generate_schemas.js opengeodeweb_back routes route /",
+    "json": "node ./node_modules/@geode/opengeodeweb-microservice/generate_schemas.js opengeodeweb_back routes route /",
     "test": "npm run json",
     "build": "npm run json"
   },
   "dependencies": {
-    "
+    "@geode/opengeodeweb-microservice": "latest"
   },
   "exports": {
     "./opengeodeweb_back_schemas.json": {
@@ -16,7 +16,6 @@
       "require": "./opengeodeweb_back_schemas.json"
     }
   },
-  "main": "generate_schemas.js",
   "repository": {
     "type": "git",
     "url": "git+https://github.com/Geode-solutions/OpenGeodeWeb-Back.git"
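The "json" script now resolves generate_schemas.js from the new @geode/opengeodeweb-microservice dependency instead of a local copy, while the generated opengeodeweb_back_schemas.json bundle is still exposed through the "exports" map. A minimal consumer sketch, assuming the generated file keeps the shape produced by the deleted script below (a single top-level key named after the project argument, "opengeodeweb_back"):

// Hypothetical usage of the exported schema bundle (CommonJS, per the "require" condition above).
const schemas = require("@geode/opengeodeweb-back/opengeodeweb_back_schemas.json");
// The generator nests all schemas under the project name passed on the command line.
console.log(Object.keys(schemas.opengeodeweb_back));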
package/generate_schemas.js
DELETED
@@ -1,122 +0,0 @@
-const fs = require("fs");
-const path = require("path");
-const glob = require("glob");
-const process = require("process");
-
-console.log("process.argv", process.argv);
-
-var projectName = process.argv[2];
-console.log("projectName", projectName);
-var folderName = process.argv[3];
-console.log("folderName", folderName);
-var key = process.argv[4];
-console.log("key", key);
-var separator = process.argv[5];
-console.log("separator", separator);
-
-const findDirectoryPath = (targetDirectoryName, folderName) => {
-  const pathToCheck = path.join(
-    process.cwd(),
-    "/src",
-    "/",
-    targetDirectoryName
-  );
-  console.log("pathToCheck", pathToCheck);
-
-  const folders = fs
-    .readdirSync(pathToCheck, { withFileTypes: true })
-    .filter(
-      (folder) =>
-        folder.isDirectory() &&
-        !folder.name.endsWith(".egg-info") &&
-        folder.name != "tests" &&
-        folder.name != "__pycache__" &&
-        folder.name.includes(folderName)
-    )
-    .map((folder) => ({
-      name: folder.name,
-      path: path.join(pathToCheck, folder.name),
-    }));
-  console.log("folders", folders);
-  const routesDirectory = path.join(folders[0].path);
-  return routesDirectory;
-};
-
-const directoryPath = findDirectoryPath(projectName, folderName);
-
-const outputFile = path.join(process.cwd(), `${projectName}_schemas.json`);
-
-function return_json_schema(directoryPath, folder_path, projectName) {
-  console.log("return_json_schema", directoryPath, folder_path, projectName);
-
-  const folders = fs
-    .readdirSync(path.normalize(directoryPath), { withFileTypes: true })
-    .filter((folder) => folder.isDirectory() && folder.name != "__pycache__")
-    .map((folder) => ({
-      name: folder.name,
-      path: path.join(directoryPath, folder.name),
-    }));
-  var folders_schemas = {};
-  folders.forEach((folder) => {
-    if (folder.name == "schemas") {
-      const jsonFiles = glob.sync(path.join(folder.path, "**/*.json"));
-      var schemas = {};
-      jsonFiles.forEach((filePath) => {
-        try {
-          const fileContent = fs.readFileSync(filePath, "utf8");
-          var jsonData = JSON.parse(fileContent);
-          var filename = filePath
-            .replace(/^.*[\\/]/, "")
-            .replace(/\.[^/.]+$/, "");
-          var route = jsonData[key];
-          console.log("FOLDER PATH", projectName);
-          var values = [projectName, folder_path, route];
-          console.log("values", values);
-          values = values.map(function (x) {
-            console.log("x", x);
-            return x.replace("/", "").replace(".", "");
-          }); // first replace first . / by empty string
-          values = values.map(function (x) {
-            console.log("x", x);
-            return x.replaceAll("/", separator).replaceAll(".", separator);
-          }); // then replace all . / by separator
-          console.log("values", values);
-          jsonData["$id"] = values
-            .filter(function (val) {
-              return val;
-            })
-            .join(separator);
-          schemas[filename] = jsonData;
-        } catch (error) {
-          console.error(
-            `Erreur lors de la lecture du fichier ${filePath}:`,
-            error
-          );
-        }
-      });
-      folders_schemas = Object.keys(schemas).reduce((acc, key) => {
-        const currentSchema = schemas[key];
-        const modifiedSchema = {
-          $id: path.join(folder_path, currentSchema["$id"]),
-          ...currentSchema,
-        };
-        acc[key] = modifiedSchema;
-        return acc;
-      }, folders_schemas);
-    } else {
-      var new_folder_path = folder_path + "/" + folder.name;
-      var test = return_json_schema(folder.path, new_folder_path, projectName);
-      folders_schemas[folder.name] = test;
-    }
-  });
-  return folders_schemas;
-}
-
-if (fs.existsSync(outputFile)) {
-  fs.unlinkSync(outputFile);
-}
-
-const finalJson = {};
-finalJson[projectName] = return_json_schema(directoryPath, "", projectName);
-
-fs.writeFileSync(outputFile, JSON.stringify(finalJson, null, 2));