@lsbjordao/type-taxon-script 1.1.6 → 1.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/tts +1 -1
- package/package.json +43 -39
- package/readme.md +540 -540
- package/dist/export.js +0 -238
- package/dist/exportSources.js +0 -63
- package/dist/exportToCsv.js +0 -268
- package/dist/findProperty.js +0 -58
- package/dist/import.js +0 -98
- package/dist/init.js +0 -131
- package/dist/new.js +0 -47
- package/dist/tts.js +0 -130
package/dist/export.js
DELETED
|
@@ -1,238 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
3
|
-
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
4
|
-
return new (P || (P = Promise))(function (resolve, reject) {
|
|
5
|
-
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
6
|
-
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
7
|
-
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
8
|
-
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
9
|
-
});
|
|
10
|
-
};
|
|
11
|
-
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
12
|
-
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
13
|
-
};
|
|
14
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
15
|
-
const fs_1 = __importDefault(require("fs"));
|
|
16
|
-
const path_1 = __importDefault(require("path"));
|
|
17
|
-
const child_process_1 = require("child_process");
|
|
18
|
-
const csv_parser_1 = __importDefault(require("csv-parser"));
|
|
19
|
-
const cli_spinner_1 = require("cli-spinner");
|
|
20
|
-
/**
 * Best-effort cleanup: removes every *.js file directly inside `folderPath`.
 * Errors are logged and swallowed so callers never observe a rejection.
 * Returns a Promise via the file-level __awaiter helper (pre-async transpile target).
 */
function deleteJSFiles(folderPath) {
    return __awaiter(this, void 0, void 0, function* () {
        try {
            const entries = yield fs_1.default.promises.readdir(folderPath);
            for (const entry of entries) {
                if (!entry.endsWith('.js')) {
                    continue;
                }
                yield fs_1.default.promises.unlink(`${folderPath}/${entry}`);
            }
        }
        catch (err) {
            // Deliberate best-effort: log and resolve normally.
            console.error('Error deleting files:', err);
        }
    });
}
|
|
35
|
-
/**
 * Converts a species name to the identifier used in the generated TypeScript:
 * whitespace becomes "_" and the first "-x" sequence becomes "X".
 * NOTE(review): the hyphen regex is non-global (original behavior preserved),
 * so only the FIRST hyphen is camel-cased — confirm against multi-hyphen epithets.
 */
function speciesIdentifier(species) {
    return species.replace(/\s/g, '_').replace(/\-([a-z])/, (_, match) => match.toUpperCase());
}
/**
 * Shared tail of both `load` modes: writes ./temp/exportTemp.ts, transpiles it
 * with tsc, runs the resulting JS with stdout redirected into
 * ./output/<genus>DB.json, then deletes generated .js files and ./temp.
 * Exits the process with code 1 on any failure (previously exited with 0).
 */
function runExportPipeline(genus, spinner, fileContent) {
    const tempFilePath = './temp/exportTemp.ts';
    fs_1.default.writeFileSync(tempFilePath, fileContent, 'utf-8');
    const fileToTranspile = 'exportTemp';
    (0, child_process_1.exec)(`tsc ./temp/${fileToTranspile}.ts`, (error, stdout, stderr) => {
        if (stdout) {
            spinner.stop();
            console.error('\x1b[31m✖ TS Error:\x1b[0m\n\n' + `${stdout}`);
            process.exit(1);
        }
        if (stderr) {
            // BUG FIX: the csv branch previously printed `stdout` here, hiding
            // the actual compiler diagnostics carried in `stderr`.
            spinner.stop();
            console.error('\x1b[31m✖ TS Error:\x1b[0m\n\n' + `${stderr}`);
            process.exit(1);
        }
        try {
            fs_1.default.unlinkSync(`./temp/${fileToTranspile}.ts`);
        }
        catch (err) {
            spinner.stop();
            console.error(`An error occurred while deleting the file: ${err}`);
            process.exit(1);
        }
        (0, child_process_1.exec)(`node ./temp/${fileToTranspile}.js > ./output/${genus}DB.json`, (error, stdout, stderr) => {
            // stdout is redirected into the JSON file by the shell, so any captured
            // stdout/stderr here indicates a runtime failure.
            if (stdout) {
                spinner.stop();
                console.error('\x1b[31m✖ JS execution time error:\x1b[0m\n\n' + `${stdout}`);
                process.exit(1);
            }
            if (stderr) {
                spinner.stop();
                console.error('\x1b[31m✖ JS execution time error:\x1b[0m\n\n' + `${stderr}`);
                process.exit(1);
            }
            deleteJSFiles(`./taxon/${genus}`).then(() => {
                const filePath = './output/';
                console.log(`\x1b[1m\x1b[32m✔ JSON database exported: \x1b[33m${filePath}${genus}DB.json\x1b[0m\x1b[1m\x1b[32m\x1b[0m`);
                spinner.stop();
                try {
                    fs_1.default.unlinkSync(`./temp/${fileToTranspile}.js`);
                    fs_1.default.rm('./temp', { recursive: true }, (err) => {
                        if (err) {
                            console.error('Error deleting directory:', err);
                            process.exit(1);
                        }
                    });
                }
                catch (err) {
                    console.error(`An error occurred while deleting the file: ${err}`);
                    process.exit(1);
                }
            });
        });
    });
}
/**
 * `tts export` entry point: builds a temporary TS file importing genus + species
 * modules and dumps them as ./output/<genus>DB.json.
 *
 * @param genus  Genus name; names the ./taxon/<genus>/ directory and the output DB.
 * @param load   'all'  — export every species .ts file in ./taxon/<genus>/;
 *               'csv'  — export only the taxa listed in ./input/taxaToExport.csv.
 */
function ttsExport(genus, load) {
    return __awaiter(this, void 0, void 0, function* () {
        if (genus === '') {
            console.error('\x1b[31m✖ Argument `--genus` cannot be empty.\x1b[0m');
            return;
        }
        if (!fs_1.default.existsSync('./input') && !fs_1.default.existsSync('./output')) {
            console.error("\x1b[31m✖ The ./input and ./output directories are not present within the project.\x1b[0m\n\x1b[36mℹ️ Please run \x1b[33m`tts init`\x1b[36m before attempting to export a database.\x1b[0m");
            return;
        }
        const spinner = new cli_spinner_1.Spinner('\x1b[36mProcessing... %s\x1b[0m');
        spinner.setSpinnerString('|/-\\'); // spinner sequence
        spinner.start();
        fs_1.default.mkdirSync('./temp', { recursive: true });
        if (load === 'all') {
            const directoryPath = `./taxon/${genus}/`;
            fs_1.default.readdir(directoryPath, (err, files) => {
                if (err) {
                    spinner.stop();
                    console.error('Error reading directory:', err);
                    process.exit(1);
                }
                const taxa = files
                    .filter(file => file.endsWith('.ts') && file !== 'index.ts')
                    .map(file => path_1.default.parse(file).name);
                const importStatements = taxa.map((species) => {
                    return `import { ${speciesIdentifier(species)} } from '../taxon/${genus}/${species.replace(/\s/g, '_')}'`;
                }).join('\n');
                const speciesCall = taxa.map((species) => {
                    return `    ${speciesIdentifier(species)},`;
                }).join('\n');
                const fileContent = `// Import genus ${genus}
import { ${genus} } from '../taxon/${genus}'

// Import species of ${genus}
${importStatements}

const ${genus}_species: ${genus}[] = [
${speciesCall}
]

// Export ${genus}DB.json
//import { writeFileSync } from 'fs'
const jsonData = JSON.stringify(${genus}_species);
console.log(jsonData)
//const inputFilePath = '../output/${genus}DB.json'
//writeFileSync(inputFilePath, jsonData, 'utf-8')
//console.log('\\x1b[1m\\x1b[32m✔ Process finished.\\x1b[0m')`;
                runExportPipeline(genus, spinner, fileContent);
            });
        }
        if (load === 'csv') {
            const taxa = [];
            const inputFilePath = './input/taxaToExport.csv';
            fs_1.default.createReadStream(inputFilePath)
                .pipe((0, csv_parser_1.default)({ headers: false }))
                .on('data', (data) => {
                    // headers: false — each row is keyed by column position.
                    taxa.push(data['0']);
                })
                .on('end', () => {
                    const importStatements = taxa.map((species) => {
                        return `import { ${speciesIdentifier(species)} } from '../taxon/${genus}/${species.replace(/\s/g, '_')}'`;
                    }).join('\n');
                    const speciesCall = taxa.map((species) => {
                        return `    ${speciesIdentifier(species)},`;
                    }).join('\n');
                    const fileContent = `// Import genus ${genus}
import { ${genus} } from '../taxon/${genus}'

// Import species of ${genus}
${importStatements}

const ${genus}_species: ${genus}[] = [
${speciesCall}
]

// Export ${genus}DB.json
const jsonData = JSON.stringify(${genus}_species);
console.log(jsonData)
// import { writeFileSync } from 'fs'
// const jsonData = JSON.stringify(${genus}_species)
// const inputFilePath = '../output/${genus}DB.json'
// writeFileSync(inputFilePath, jsonData, 'utf-8')
// console.log('\\x1b[1m\\x1b[32m✔ Process finished.\\x1b[0m')`;
                    runExportPipeline(genus, spinner, fileContent);
                });
        }
    });
}
exports.default = ttsExport;
|
package/dist/exportSources.js
DELETED
|
@@ -1,63 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
-
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
-
};
|
|
5
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
-
const fs_1 = __importDefault(require("fs"));
|
|
7
|
-
const lodash_1 = __importDefault(require("lodash"));
|
|
8
|
-
/**
 * `tts exportSources` — scans ./output/<genus>DB.json for every non-empty
 * `sources` array anywhere inside each record and writes a flat listing
 * (index, dotted path, specificEpithet, source) to ./output/<genus>SourcesDB.json.
 */
function ttsExportSources(genus) {
    if (genus === '') {
        console.error('\x1b[31m✖ Argument `--genus` cannot be empty.\x1b[0m');
        return;
    }
    if (!fs_1.default.existsSync('./input') && !fs_1.default.existsSync('./output')) {
        console.error("\x1b[31m✖ The ./input and ./output directories are not present within the project.\x1b[0m\n\x1b[36mℹ️ Please run \x1b[33m`tts init`\x1b[36m before attempting to export a database.\x1b[0m");
        return;
    }
    const filePath = `./output/${genus}DB.json`;
    fs_1.default.readFile(filePath, 'utf8', (readErr, raw) => {
        if (readErr) {
            console.error('Error reading the file:', readErr);
            return;
        }
        try {
            const records = JSON.parse(raw);
            // Walks the (index-tagged) record array and collects one entry per
            // element of every non-empty `sources` array found at any depth.
            const collectSources = (root, startPath = []) => {
                const hits = [];
                const walk = (node, trail) => {
                    if (!lodash_1.default.isObject(node)) {
                        return;
                    }
                    lodash_1.default.forOwn(node, (value, key) => {
                        if (key === 'sources' && Array.isArray(value) && value.length > 0) {
                            value.forEach((source) => {
                                hits.push({
                                    index: trail[0],
                                    path: trail.join('.'),
                                    specificEpithet: lodash_1.default.get(records[trail[0]], 'specificEpithet'),
                                    source: source
                                });
                            });
                        }
                        if (lodash_1.default.isObject(value)) {
                            walk(value, [...trail, key]);
                        }
                    });
                };
                walk(root, startPath);
                // Strip the leading record index from each dotted path.
                hits.forEach(hit => {
                    hit.path = hit.path.replace(new RegExp(`^${hit.index}\\.|${hit.index}$`), '');
                });
                return hits;
            };
            const objectsWithSources = collectSources(records.map((item, index) => (Object.assign(Object.assign({}, item), { index }))));
            const filePathOutput = `./output/${genus}SourcesDB.json`;
            const jsonContent = JSON.stringify(objectsWithSources, null, 2);
            fs_1.default.writeFileSync(filePathOutput, jsonContent, 'utf-8');
            console.log(`\x1b[1m\x1b[32m✔ Database exported: \x1b[33m${filePathOutput}\x1b[0m\x1b[1m\x1b[32m\x1b[0m`);
        }
        catch (jsonErr) {
            console.error('Error parsing JSON:', jsonErr);
        }
    });
}
exports.default = ttsExportSources;
|
package/dist/exportToCsv.js
DELETED
|
@@ -1,268 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
3
|
-
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
4
|
-
return new (P || (P = Promise))(function (resolve, reject) {
|
|
5
|
-
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
6
|
-
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
7
|
-
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
8
|
-
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
9
|
-
});
|
|
10
|
-
};
|
|
11
|
-
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
12
|
-
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
13
|
-
};
|
|
14
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
15
|
-
const fs_1 = __importDefault(require("fs"));
|
|
16
|
-
const path_1 = __importDefault(require("path"));
|
|
17
|
-
const child_process_1 = require("child_process");
|
|
18
|
-
const csv_parser_1 = __importDefault(require("csv-parser"));
|
|
19
|
-
const plainjs_1 = require("@json2csv/plainjs");
|
|
20
|
-
const transforms_1 = require("@json2csv/transforms");
|
|
21
|
-
const cli_spinner_1 = require("cli-spinner");
|
|
22
|
-
/**
 * Removes every *.js file found directly inside `folderPath`. Failures are
 * logged and otherwise ignored so the export pipeline keeps going.
 * Returns a Promise via the file-level __awaiter helper.
 */
function deleteJSFiles(folderPath) {
    return __awaiter(this, void 0, void 0, function* () {
        try {
            const names = yield fs_1.default.promises.readdir(folderPath);
            const compiled = names.filter(name => name.endsWith('.js'));
            for (const name of compiled) {
                yield fs_1.default.promises.unlink(`${folderPath}/${name}`);
            }
        }
        catch (err) {
            console.error('Error deleting files:', err);
        }
    });
}
|
|
37
|
-
/**
 * Converts a species name to the identifier used in the generated TypeScript:
 * whitespace becomes "_" and the first "-x" sequence becomes "X".
 * NOTE(review): the hyphen regex is non-global (original behavior preserved),
 * so only the FIRST hyphen is camel-cased — confirm against multi-hyphen epithets.
 */
function speciesIdentifier(species) {
    return species.replace(/\s/g, '_').replace(/\-([a-z])/, (_, match) => match.toUpperCase());
}
/**
 * Shared tail of both `load` modes: writes ./temp/exportTemp.ts, transpiles it
 * with tsc, runs the resulting JS with stdout redirected into
 * ./output/<genus>DB.json, flattens that JSON into ./output/<genus>DB.csv,
 * then deletes generated .js files and ./temp.
 * Exits the process with code 1 on any failure (previously exited with 0).
 */
function runCsvExportPipeline(genus, spinner, fileContent) {
    const tempFilePath = './temp/exportTemp.ts';
    fs_1.default.writeFileSync(tempFilePath, fileContent, 'utf-8');
    const fileToTranspile = 'exportTemp';
    (0, child_process_1.exec)(`tsc ./temp/${fileToTranspile}.ts`, (error, stdout, stderr) => {
        if (stdout) {
            spinner.stop();
            console.error('\x1b[31m✖ TS Error:\x1b[0m\n\n' + `${stdout}`);
            process.exit(1);
        }
        if (stderr) {
            // BUG FIX: the csv branch previously printed `stdout` here, hiding
            // the actual compiler diagnostics carried in `stderr`.
            spinner.stop();
            console.error('\x1b[31m✖ TS Error:\x1b[0m\n\n' + `${stderr}`);
            process.exit(1);
        }
        try {
            fs_1.default.unlinkSync(`./temp/${fileToTranspile}.ts`);
        }
        catch (err) {
            spinner.stop();
            console.error(`An error occurred while deleting the file: ${err}`);
            process.exit(1);
        }
        (0, child_process_1.exec)(`node ./temp/${fileToTranspile}.js > ./output/${genus}DB.json`, (error, stdout, stderr) => {
            // stdout is redirected into the JSON file by the shell, so any captured
            // stdout/stderr here indicates a runtime failure.
            if (stdout) {
                spinner.stop();
                console.error('\x1b[31m✖ JS execution time error:\x1b[0m\n\n' + `${stdout}`);
                process.exit(1);
            }
            if (stderr) {
                spinner.stop();
                console.error('\x1b[31m✖ JS execution time error:\x1b[0m\n\n' + `${stderr}`);
                process.exit(1);
            }
            deleteJSFiles(`./taxon/${genus}`).then(() => {
                const filePath = './output/';
                try {
                    const data = fs_1.default.readFileSync(`./output/${genus}DB.json`, 'utf8');
                    // Flatten nested objects into dotted column names before CSV conversion.
                    const opts = {
                        transforms: [
                            (0, transforms_1.flatten)({ separator: '.' })
                        ]
                    };
                    const parser = new plainjs_1.Parser(opts);
                    const csv = parser.parse(JSON.parse(data));
                    fs_1.default.writeFileSync(`./output/${genus}DB.csv`, csv);
                }
                catch (err) {
                    console.error('Error reading the file:', err);
                }
                console.log(`\x1b[1m\x1b[32m✔ CSV database exported: \x1b[33m${filePath}${genus}DB.csv\x1b[0m\x1b[1m\x1b[32m\x1b[0m`);
                spinner.stop();
                try {
                    fs_1.default.unlinkSync(`./temp/${fileToTranspile}.js`);
                    fs_1.default.rm('./temp', { recursive: true }, (err) => {
                        if (err) {
                            console.error('Error deleting directory:', err);
                            process.exit(1);
                        }
                    });
                }
                catch (err) {
                    console.error(`An error occurred while deleting the file: ${err}`);
                    process.exit(1);
                }
            });
        });
    });
}
/**
 * `tts exportToCsv` entry point: builds a temporary TS file importing genus +
 * species modules, dumps them to ./output/<genus>DB.json, then flattens that
 * JSON into ./output/<genus>DB.csv.
 *
 * @param genus  Genus name; names the ./taxon/<genus>/ directory and the outputs.
 * @param load   'all'  — export every species .ts file in ./taxon/<genus>/;
 *               'csv'  — export only the taxa listed in ./input/taxaToExport.csv.
 */
function ttsExportToCsv(genus, load) {
    return __awaiter(this, void 0, void 0, function* () {
        if (genus === '') {
            console.error('\x1b[31m✖ Argument `--genus` cannot be empty.\x1b[0m');
            return;
        }
        if (!fs_1.default.existsSync('./input') && !fs_1.default.existsSync('./output')) {
            console.error("\x1b[31m✖ The ./input and ./output directories are not present within the project.\x1b[0m\n\x1b[36mℹ️ Please run \x1b[33m`tts init`\x1b[36m before attempting to export a database.\x1b[0m");
            return;
        }
        const spinner = new cli_spinner_1.Spinner('\x1b[36mProcessing... %s\x1b[0m');
        spinner.setSpinnerString('|/-\\'); // spinner sequence
        spinner.start();
        fs_1.default.mkdirSync('./temp', { recursive: true });
        if (load === 'all') {
            const directoryPath = `./taxon/${genus}/`;
            fs_1.default.readdir(directoryPath, (err, files) => {
                if (err) {
                    spinner.stop();
                    console.error('Error reading directory:', err);
                    process.exit(1);
                }
                const taxa = files
                    .filter(file => file.endsWith('.ts') && file !== 'index.ts')
                    .map(file => path_1.default.parse(file).name);
                const importStatements = taxa.map((species) => {
                    return `import { ${speciesIdentifier(species)} } from '../taxon/${genus}/${species.replace(/\s/g, '_')}'`;
                }).join('\n');
                const speciesCall = taxa.map((species) => {
                    return `    ${speciesIdentifier(species)},`;
                }).join('\n');
                const fileContent = `// Import genus ${genus}
import { ${genus} } from '../taxon/${genus}'

// Import species of ${genus}
${importStatements}

const ${genus}_species: ${genus}[] = [
${speciesCall}
]

// Export ${genus}DB.json
//import { writeFileSync } from 'fs'
const jsonData = JSON.stringify(${genus}_species);
console.log(jsonData)
//const inputFilePath = '../output/${genus}DB.json'
//writeFileSync(inputFilePath, jsonData, 'utf-8')
//console.log('\\x1b[1m\\x1b[32m✔ Process finished.\\x1b[0m')`;
                runCsvExportPipeline(genus, spinner, fileContent);
            });
        }
        if (load === 'csv') {
            const taxa = [];
            const inputFilePath = './input/taxaToExport.csv';
            fs_1.default.createReadStream(inputFilePath)
                .pipe((0, csv_parser_1.default)({ headers: false }))
                .on('data', (data) => {
                    // headers: false — each row is keyed by column position.
                    taxa.push(data['0']);
                })
                .on('end', () => {
                    const importStatements = taxa.map((species) => {
                        return `import { ${speciesIdentifier(species)} } from '../taxon/${genus}/${species.replace(/\s/g, '_')}'`;
                    }).join('\n');
                    const speciesCall = taxa.map((species) => {
                        return `    ${speciesIdentifier(species)},`;
                    }).join('\n');
                    const fileContent = `// Import genus ${genus}
import { ${genus} } from '../taxon/${genus}'

// Import species of ${genus}
${importStatements}

const ${genus}_species: ${genus}[] = [
${speciesCall}
]

// Export ${genus}DB.json
const jsonData = JSON.stringify(${genus}_species);
console.log(jsonData)
// import { writeFileSync } from 'fs'
// const jsonData = JSON.stringify(${genus}_species)
// const inputFilePath = '../output/${genus}DB.json'
// writeFileSync(inputFilePath, jsonData, 'utf-8')
// console.log('\\x1b[1m\\x1b[32m✔ Process finished.\\x1b[0m')`;
                    runCsvExportPipeline(genus, spinner, fileContent);
                });
        }
    });
}
exports.default = ttsExportToCsv;
|
package/dist/findProperty.js
DELETED
|
@@ -1,58 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
-
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
-
};
|
|
5
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
-
const fs_1 = __importDefault(require("fs"));
|
|
7
|
-
const lodash_1 = __importDefault(require("lodash"));
|
|
8
|
-
/**
 * `tts findProperty` — for each record in ./output/<genus>DB.json, reports the
 * dotted paths at which `property` (a lodash-style path) resolves to a truthy
 * value. Results are printed to the console.
 */
function ttsfindProperty(property, genus) {
    if (property === '') {
        console.error('\x1b[31m✖ Argument `--property` cannot be empty.\x1b[0m');
        return;
    }
    if (genus === '') {
        console.error('\x1b[31m✖ Argument `--genus` cannot be empty.\x1b[0m');
        return;
    }
    const filePath = `./output/${genus}DB.json`;
    const propertyPathToFind = property;
    fs_1.default.readFile(filePath, 'utf8', (readErr, raw) => {
        if (readErr) {
            console.error('Error reading the file:', readErr);
            return;
        }
        try {
            const records = JSON.parse(raw);
            // Collects every dotted path inside `root` whose subtree resolves
            // `propertyPath` to a truthy value; paths ending in a numeric key
            // (array positions) are skipped.
            const locate = (root, propertyPath, startPath = []) => {
                const found = [];
                const descend = (node, trail) => {
                    const tail = trail[trail.length - 1];
                    if (lodash_1.default.get(node, propertyPath) && typeof tail !== 'number') {
                        found.push(trail.join('.'));
                    }
                    lodash_1.default.forEach(node, (value, key) => {
                        if (lodash_1.default.isObject(value)) {
                            descend(value, [...trail, key]);
                        }
                    });
                };
                descend(root, startPath);
                return found;
            };
            const resultIndicesAndPaths = records.flatMap((item, index) => {
                const paths = locate(item, propertyPathToFind);
                return paths.length > 0
                    ? { index, paths, specificEpithet: records[index].specificEpithet }
                    : [];
            });
            console.log(`\x1b[36mℹ️ Indices and paths of objects with the property \x1b[33m${propertyPathToFind}\x1b[0m\x1b[36m:\n\n\x1b[0m`, resultIndicesAndPaths);
        }
        catch (jsonErr) {
            console.error('Error parsing JSON:', jsonErr);
        }
    });
}
exports.default = ttsfindProperty;
|