@tricoteuses/senat 0.3.2 → 1.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (131)
  1. package/README.md +55 -0
  2. package/lib/aggregates.d.ts +54 -0
  3. package/lib/aggregates.js +1122 -0
  4. package/lib/aggregates.mjs +802 -0
  5. package/lib/aggregates.ts +947 -0
  6. package/lib/config.d.ts +2 -0
  7. package/lib/config.js +49 -0
  8. package/lib/config.mjs +16 -0
  9. package/lib/config.ts +26 -0
  10. package/lib/databases.d.ts +11 -0
  11. package/lib/databases.js +125 -0
  12. package/lib/databases.mjs +26 -0
  13. package/lib/databases.ts +32 -0
  14. package/lib/datasets.d.ts +28 -0
  15. package/lib/datasets.js +93 -0
  16. package/lib/datasets.mjs +78 -0
  17. package/lib/datasets.ts +118 -0
  18. package/lib/fields.d.ts +10 -0
  19. package/lib/fields.js +68 -0
  20. package/lib/fields.mjs +22 -0
  21. package/lib/fields.ts +29 -0
  22. package/lib/index.d.ts +6 -5
  23. package/lib/index.js +236 -7
  24. package/lib/index.mjs +6 -1
  25. package/lib/index.ts +59 -0
  26. package/lib/inserters.d.ts +6 -2
  27. package/lib/inserters.js +86 -191
  28. package/lib/inserters.mjs +19 -3
  29. package/lib/inserters.ts +545 -0
  30. package/lib/model/ameli.d.ts +4 -0
  31. package/lib/model/ameli.js +167 -0
  32. package/lib/model/ameli.mjs +57 -0
  33. package/lib/model/ameli.ts +86 -0
  34. package/lib/model/debats.d.ts +4 -0
  35. package/lib/model/debats.js +123 -0
  36. package/lib/model/debats.mjs +43 -0
  37. package/lib/model/debats.ts +68 -0
  38. package/lib/model/dosleg.d.ts +29 -0
  39. package/lib/model/dosleg.js +840 -0
  40. package/lib/model/dosleg.mjs +337 -0
  41. package/lib/model/dosleg.ts +558 -0
  42. package/lib/model/index.d.ts +5 -0
  43. package/lib/model/index.js +48 -0
  44. package/lib/model/index.mjs +5 -0
  45. package/lib/model/index.ts +11 -0
  46. package/lib/model/questions.d.ts +2 -0
  47. package/lib/model/questions.js +52 -0
  48. package/lib/model/questions.mjs +8 -0
  49. package/lib/model/questions.ts +14 -0
  50. package/lib/model/sens.d.ts +2 -0
  51. package/lib/model/sens.js +57 -0
  52. package/lib/model/sens.mjs +9 -0
  53. package/lib/model/sens.ts +18 -0
  54. package/lib/model/util.d.ts +1 -0
  55. package/lib/model/util.js +60 -0
  56. package/lib/model/util.mjs +10 -0
  57. package/lib/model/util.ts +16 -0
  58. package/lib/raw_types/ameli.d.ts +416 -410
  59. package/lib/raw_types/ameli.js +26 -144
  60. package/lib/raw_types/ameli.ts +601 -0
  61. package/lib/raw_types/debats.d.ts +82 -80
  62. package/lib/raw_types/debats.js +8 -42
  63. package/lib/raw_types/debats.ts +145 -0
  64. package/lib/raw_types/dosleg.d.ts +936 -920
  65. package/lib/raw_types/dosleg.js +5 -345
  66. package/lib/raw_types/dosleg.ts +2193 -0
  67. package/lib/raw_types/questions.d.ts +226 -2
  68. package/lib/raw_types/questions.js +18 -5
  69. package/lib/raw_types/questions.mjs +1 -8
  70. package/lib/raw_types/questions.ts +249 -0
  71. package/lib/raw_types/sens.d.ts +1180 -1176
  72. package/lib/raw_types/sens.js +3 -397
  73. package/lib/raw_types/sens.ts +2907 -0
  74. package/lib/scripts/convert_data.d.ts +1 -0
  75. package/lib/scripts/convert_data.js +284 -0
  76. package/lib/scripts/convert_data.mjs +146 -0
  77. package/lib/scripts/convert_data.ts +182 -0
  78. package/lib/scripts/fix_db.d.ts +1 -0
  79. package/lib/scripts/fix_db.js +144 -0
  80. package/lib/scripts/fix_db.mjs +64 -0
  81. package/lib/scripts/fix_db.ts +75 -0
  82. package/lib/scripts/images/transparent_150x192.jpg +0 -0
  83. package/lib/scripts/images/transparent_155x225.jpg +0 -0
  84. package/lib/scripts/retrieve_open_data.d.ts +1 -0
  85. package/lib/scripts/retrieve_open_data.js +371 -0
  86. package/lib/scripts/retrieve_open_data.mjs +218 -0
  87. package/lib/scripts/retrieve_open_data.ts +258 -0
  88. package/lib/scripts/retrieve_senateurs_photos.d.ts +1 -0
  89. package/lib/scripts/retrieve_senateurs_photos.js +268 -0
  90. package/lib/scripts/retrieve_senateurs_photos.mjs +164 -0
  91. package/lib/scripts/retrieve_senateurs_photos.ts +200 -0
  92. package/lib/scripts/retrieve_textes.d.ts +1 -0
  93. package/lib/scripts/retrieve_textes.js +179 -0
  94. package/lib/scripts/retrieve_textes.mjs +77 -0
  95. package/lib/scripts/retrieve_textes.ts +95 -0
  96. package/lib/strings.d.ts +1 -0
  97. package/lib/strings.js +49 -0
  98. package/lib/strings.mjs +18 -0
  99. package/lib/strings.ts +26 -0
  100. package/lib/types/ameli.d.ts +5 -0
  101. package/lib/types/ameli.js +7 -4
  102. package/lib/types/ameli.mjs +13 -1
  103. package/lib/types/ameli.ts +21 -0
  104. package/lib/types/debats.d.ts +2 -0
  105. package/lib/types/debats.js +4 -3
  106. package/lib/types/debats.mjs +2 -1
  107. package/lib/types/debats.ts +6 -0
  108. package/lib/types/dosleg.d.ts +28 -0
  109. package/lib/types/dosleg.js +30 -3
  110. package/lib/types/dosleg.mjs +151 -1
  111. package/lib/types/dosleg.ts +284 -0
  112. package/lib/types/questions.d.ts +2 -0
  113. package/lib/types/questions.js +13 -1
  114. package/lib/types/questions.mjs +1 -1
  115. package/lib/types/questions.ts +3 -0
  116. package/lib/types/sens.d.ts +2 -0
  117. package/lib/types/sens.js +4 -3
  118. package/lib/types/sens.mjs +23 -1
  119. package/lib/types/sens.ts +36 -0
  120. package/lib/typings/windows-1252.d.js +2 -0
  121. package/lib/typings/windows-1252.d.mjs +2 -0
  122. package/lib/typings/windows-1252.d.ts +11 -0
  123. package/lib/validators/config.d.ts +1 -0
  124. package/lib/validators/config.js +121 -0
  125. package/lib/validators/config.mjs +54 -0
  126. package/lib/validators/config.ts +79 -0
  127. package/lib/validators/senat.d.ts +0 -0
  128. package/lib/validators/senat.js +28 -0
  129. package/lib/validators/senat.mjs +24 -0
  130. package/lib/validators/senat.ts +26 -0
  131. package/package.json +35 -4
@@ -0,0 +1,218 @@
1
+ import assert from "assert";
2
+ import { execSync } from "child_process";
3
+ import commandLineArgs from "command-line-args";
4
+ import fs from "fs-extra";
5
+ // import fetch from "node-fetch"
6
+ import path from "path";
7
+ // import stream from "stream"
8
+ import StreamZip from "node-stream-zip";
9
+ import readline from "readline";
10
+ // import util from "util"
11
+ import windows1252 from "windows-1252";
12
+ import config from "../config";
13
+ import { getChosenFromEnabledDatasets } from '../datasets';
14
+ const badWindows1252CharacterRegex = /[\u0080-\u009f]/g;
15
+ const optionsDefinitions = [
16
+ {
17
+ alias: 'k',
18
+ defaultValue: ['All'],
19
+ help: 'categories of datasets to reorganize',
20
+ multiple: true,
21
+ name: 'categories',
22
+ type: String,
23
+ },
24
+ {
25
+ alias: "a",
26
+ help: "all options: fetch, unzip, repair-encoding, import, schema",
27
+ name: "all",
28
+ type: Boolean,
29
+ },
30
+ {
31
+ alias: "c",
32
+ help: "create TypeScript interfaces from databases schemas into src/raw_types directory",
33
+ name: "schema",
34
+ type: Boolean,
35
+ },
36
+ {
37
+ alias: "e",
38
+ help: "repair Windows CP 1252 encoding of SQL dumps",
39
+ name: "repair-encoding",
40
+ type: Boolean,
41
+ },
42
+ {
43
+ alias: "f",
44
+ help: "fetch datasets instead of retrieving them from files",
45
+ name: "fetch",
46
+ type: Boolean,
47
+ },
48
+ {
49
+ alias: "i",
50
+ help: "import SQL dumps into a freshly (re-)created database",
51
+ name: "import",
52
+ type: Boolean,
53
+ },
54
+ {
55
+ alias: "z",
56
+ help: "unzip SQL files",
57
+ name: "unzip",
58
+ type: Boolean,
59
+ },
60
+ {
61
+ alias: "s",
62
+ help: "don't log anything",
63
+ name: "silent",
64
+ type: Boolean,
65
+ },
66
+ {
67
+ defaultOption: true,
68
+ help: "directory containing Sénat open data files",
69
+ name: "dataDir",
70
+ type: String,
71
+ },
72
+ ];
73
+ const options = commandLineArgs(optionsDefinitions);
74
+ // const pipeline = util.promisify(stream.pipeline)
75
/**
 * Retrieves one Sénat open-data dataset. Depending on the command-line
 * options, this fetches the dataset's ZIP file, unzips it, repairs its
 * Windows-1252 encoding, imports the SQL dump into a freshly (re-)created
 * PostgreSQL database, and regenerates TypeScript definitions from the
 * database schema.
 *
 * @param dataDir directory where ZIP & SQL files are stored
 * @param dataset dataset descriptor (url, title, database, repairZip?, repairEncoding?)
 */
async function retrieveDataset(dataDir, dataset) {
    const zipFilename = dataset.url.substring(dataset.url.lastIndexOf("/") + 1);
    const zipFilePath = path.join(dataDir, zipFilename);
    if (options.all || options.fetch) {
        // Fetch & save ZIP file.
        if (!options.silent) {
            console.log(`Loading ${dataset.title}: ${zipFilename}…`);
        }
        // node-fetch (and "curl") fail here with OpenSSL error "dh key too
        // small", hence the use of wget.
        fs.removeSync(zipFilePath);
        // NOTE(review): dataset.url is interpolated into a shell command; it
        // comes from the internal datasets configuration and must never be
        // fed from untrusted input.
        execSync(`wget --quiet ${dataset.url}`, {
            cwd: dataDir,
            env: process.env,
            encoding: "utf-8",
        });
    }
    const sqlFilename = `${dataset.database}.sql`;
    const sqlFilePath = path.join(dataDir, sqlFilename);
    if (options.all || options.unzip) {
        if (!options.silent) {
            console.log(`Unzipping ${dataset.title}: ${zipFilename}…`);
        }
        fs.removeSync(sqlFilePath);
        const zip = new StreamZip({
            file: zipFilePath,
            storeEntries: true,
        });
        await new Promise((resolve, reject) => {
            zip.on("ready", () => {
                // Extract every entry of the archive into dataDir.
                zip.extract(null, dataDir, (err, _count) => {
                    zip.close();
                    if (err) {
                        reject(err);
                    }
                    else {
                        resolve(null);
                    }
                });
            });
            // Without this listener a corrupt or missing ZIP would leave the
            // promise pending forever: StreamZip reports such failures via
            // an "error" event, not via the "ready" callback.
            zip.on("error", reject);
        });
        if (dataset.repairZip !== undefined) {
            if (!options.silent) {
                console.log(`Repairing Zip path ${dataset.title}: ${sqlFilename}…`);
            }
            dataset.repairZip(dataset, dataDir);
        }
    }
    if ((options.all || options["repair-encoding"]) && dataset.repairEncoding) {
        if (!options.silent) {
            console.log(`Repairing Windows CP1252 encoding of ${dataset.title}: ${sqlFilename}…`);
        }
        // Rewrite the dump line by line into a ".repaired" sibling file,
        // re-decoding stray Windows-1252 characters, then atomically replace
        // the original.
        const repairedSqlFilePath = sqlFilePath + ".repaired";
        const repairedSqlWriter = fs.createWriteStream(repairedSqlFilePath, {
            encoding: "utf8",
        });
        const lineReader = readline.createInterface({
            input: fs.createReadStream(sqlFilePath, { encoding: "utf8" }),
            crlfDelay: Infinity,
        });
        for await (const line of lineReader) {
            const repairedLine = line.replace(badWindows1252CharacterRegex, (match) => windows1252.decode(match, { mode: "fatal" })) + "\n";
            // Honor backpressure: when write() returns false, wait for
            // "drain" so huge SQL dumps don't pile up in memory.
            if (!repairedSqlWriter.write(repairedLine)) {
                await new Promise((resolve) => repairedSqlWriter.once("drain", resolve));
            }
        }
        // Wait until the stream has flushed to disk before moving the file;
        // a bare end() followed by the move could operate on a
        // partially-written file.
        await new Promise((resolve, reject) => {
            repairedSqlWriter.on("error", reject);
            repairedSqlWriter.end(resolve);
        });
        await fs.move(repairedSqlFilePath, sqlFilePath, { overwrite: true });
    }
    if (options.all || options.import) {
        if (!options.silent) {
            console.log(`Importing ${dataset.title}: ${sqlFilename}…`);
        }
        // (Re-)create the database from scratch, then load the SQL dump.
        // Connection settings come from the PG* environment variables set up
        // by retrieveOpenData().
        execSync(`psql -c "DROP DATABASE IF EXISTS ${dataset.database}"`, {
            cwd: dataDir,
            env: process.env,
            encoding: "utf-8",
        });
        execSync(`psql -c "CREATE DATABASE ${dataset.database} WITH OWNER opendata"`, {
            cwd: dataDir,
            env: process.env,
            encoding: "utf-8",
        });
        execSync(`psql -f ${sqlFilename} ${dataset.database}`, {
            cwd: dataDir,
            env: process.env,
            encoding: "utf-8",
        });
    }
    // NOTE(review): unlike the other steps, --all does not trigger schema
    // generation although the --all help text lists "schema" — confirm this
    // asymmetry is intended (schema writes into src/, which only exists in a
    // development checkout).
    if (options.schema) {
        const definitionsDir = path.resolve("src", "raw_types");
        assert(fs.statSync(definitionsDir).isDirectory());
        if (!options.silent) {
            console.log(`Creating TypeScript definitions from schema of database ${dataset.database}…`);
        }
        const dbConnectionString = `postgres://${process.env.PGUSER}:${process.env.PGPASSWORD}@${process.env.PGHOST}:${process.env.PGPORT}/${dataset.database}`;
        const definitionFilePath = path.join(definitionsDir, `${dataset.database}.ts`);
        execSync(`npx schemats generate -c ${dbConnectionString} -s public -o ${definitionFilePath}`, {
            env: process.env,
            encoding: "utf-8",
        });
        // Normalize line endings and strip the generation timestamp so the
        // generated file is stable across runs (no spurious diffs).
        const definition = fs.readFileSync(definitionFilePath, { encoding: "utf8" });
        const definitionRepaired = definition
            .replace(/\r\n/g, "\n")
            .replace(/AUTO-GENERATED FILE @ \d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}/, "AUTO-GENERATED FILE");
        fs.writeFileSync(definitionFilePath, definitionRepaired);
    }
}
193
/**
 * Entry point: validates the CLI arguments, derives the PostgreSQL connection
 * environment (PG* variables, falling back to the configuration file), then
 * retrieves every chosen dataset sequentially.
 */
async function retrieveOpenData() {
    const { dataDir } = options;
    assert(dataDir, "Missing argument: data directory");
    // Fill in any missing PG* variable from the configuration file so the
    // psql child processes inherit a complete connection setup.
    const { db } = config;
    process.env = {
        ...process.env,
        PGHOST: process.env.PGHOST || db.host,
        PGPORT: process.env.PGPORT || db.port,
        PGUSER: process.env.PGUSER || db.user,
        PGPASSWORD: process.env.PGPASSWORD || db.password
    };
    const hasDbConfig = Boolean(process.env.PGHOST
        && process.env.PGPORT
        && process.env.PGUSER
        && process.env.PGPASSWORD);
    assert(hasDbConfig, 'Missing database configuration: environment variables PGHOST, PGPORT, PGUSER and PGPASSWORD or TRICOTEUSES_SENAT_DB_* in .env file');
    // Datasets are processed one at a time (a Promise.all-based parallel
    // version was tried and left disabled upstream).
    for (const dataset of getChosenFromEnabledDatasets(options.categories)) {
        await retrieveDataset(dataDir, dataset);
    }
}
213
// Run the script: exit 0 on success; otherwise report the failure and exit 1.
retrieveOpenData()
    .then(() => process.exit(0))
    .catch((error) => {
        // Report failures on stderr so they are not mixed with normal output
        // and survive stdout redirection.
        console.error(error);
        process.exit(1);
    });
@@ -0,0 +1,258 @@
1
+ import assert from "assert"
2
+ import { execSync } from "child_process"
3
+ import commandLineArgs from "command-line-args"
4
+ import fs from "fs-extra"
5
+ // import fetch from "node-fetch"
6
+ import path from "path"
7
+ // import stream from "stream"
8
+ import StreamZip from "node-stream-zip"
9
+ import readline from "readline"
10
+ // import util from "util"
11
+ import windows1252 from "windows-1252"
12
+
13
+ import config from "../config"
14
+ import {
15
+ Dataset,
16
+ getChosenFromEnabledDatasets
17
+ } from '../datasets'
18
+
19
// Code points U+0080–U+009F are C1 control characters; their presence betrays
// Windows-1252 text that was mis-read as Latin-1/UTF-8. The repair-encoding
// step re-decodes every such character.
const badWindows1252CharacterRegex = /[\u0080-\u009f]/g
// Command-line options of this script, in "command-line-args" format.
const optionsDefinitions = [
  {
    alias: 'k',
    defaultValue: ['All'],
    help: 'categories of datasets to reorganize',
    multiple: true,
    name: 'categories',
    type: String,
  },
  {
    alias: "a",
    help: "all options: fetch, unzip, repair-encoding, import, schema",
    name: "all",
    type: Boolean,
  },
  {
    alias: "c",
    help:
      "create TypeScript interfaces from databases schemas into src/raw_types directory",
    name: "schema",
    type: Boolean,
  },
  {
    alias: "e",
    help: "repair Windows CP 1252 encoding of SQL dumps",
    name: "repair-encoding",
    type: Boolean,
  },
  {
    alias: "f",
    help: "fetch datasets instead of retrieving them from files",
    name: "fetch",
    type: Boolean,
  },
  {
    alias: "i",
    help: "import SQL dumps into a freshly (re-)created database",
    name: "import",
    type: Boolean,
  },
  {
    alias: "z",
    help: "unzip SQL files",
    name: "unzip",
    type: Boolean,
  },
  {
    alias: "s",
    help: "don't log anything",
    name: "silent",
    type: Boolean,
  },
  {
    // Positional argument: the data directory.
    defaultOption: true,
    help: "directory containing Sénat open data files",
    name: "dataDir",
    type: String,
  },
]
// Parsed command-line options, read by every function of this script.
const options = commandLineArgs(optionsDefinitions)

// const pipeline = util.promisify(stream.pipeline)
82
+
83
+ async function retrieveDataset(
84
+ dataDir: string,
85
+ dataset: Dataset,
86
+ ): Promise<void> {
87
+ const zipFilename = dataset.url.substring(dataset.url.lastIndexOf("/") + 1)
88
+ const zipFilePath = path.join(dataDir, zipFilename)
89
+ if (options.all || options.fetch) {
90
+ // Fetch & save ZIP file.
91
+ if (!options.silent) {
92
+ console.log(`Loading ${dataset.title}: ${zipFilename}…`)
93
+ }
94
+ // Fetch fails with OpenSSL error: dh key too small.
95
+ // (so does "curl").
96
+ // const response = await fetch(dataset.url)
97
+ // if (!response.ok) {
98
+ // console.error(response.status, response.statusText)
99
+ // console.error(await response.text())
100
+ // throw new Error(`Fetch failed: ${dataset.url}`)
101
+ // }
102
+ // await pipeline(response.body!, fs.createWriteStream(zipFilePath))
103
+ fs.removeSync(zipFilePath)
104
+ execSync(`wget --quiet ${dataset.url}`, {
105
+ cwd: dataDir,
106
+ env: process.env,
107
+ encoding: "utf-8",
108
+ // stdio: ["ignore", "ignore", "pipe"],
109
+ })
110
+ }
111
+
112
+ const sqlFilename = `${dataset.database}.sql`
113
+ const sqlFilePath = path.join(dataDir, sqlFilename)
114
+ if (options.all || options.unzip) {
115
+ if (!options.silent) {
116
+ console.log(`Unzipping ${dataset.title}: ${zipFilename}…`)
117
+ }
118
+ fs.removeSync(sqlFilePath)
119
+ const zip = new StreamZip({
120
+ file: zipFilePath,
121
+ storeEntries: true,
122
+ })
123
+ await new Promise((resolve, reject) => {
124
+ zip.on("ready", () => {
125
+ zip.extract(null, dataDir, (err?: any, _count?: number) => {
126
+ zip.close()
127
+ if (err) {
128
+ reject(err)
129
+ } else {
130
+ resolve(null)
131
+ }
132
+ })
133
+ })
134
+ })
135
+ if (dataset.repairZip !== undefined) {
136
+ if (!options.silent) {
137
+ console.log(`Repairing Zip path ${dataset.title}: ${sqlFilename}…`)
138
+ }
139
+ dataset.repairZip(dataset, dataDir)
140
+ }
141
+ }
142
+
143
+ if ((options.all || options["repair-encoding"]) && dataset.repairEncoding) {
144
+ if (!options.silent) {
145
+ console.log(
146
+ `Repairing Windows CP1252 encoding of ${dataset.title}: ${sqlFilename}…`,
147
+ )
148
+ }
149
+ const repairedSqlFilePath = sqlFilePath + ".repaired"
150
+ const repairedSqlWriter = fs.createWriteStream(repairedSqlFilePath, {
151
+ encoding: "utf8",
152
+ })
153
+ const lineReader = readline.createInterface({
154
+ input: fs.createReadStream(sqlFilePath, { encoding: "utf8" }),
155
+ crlfDelay: Infinity,
156
+ })
157
+ for await (const line of lineReader) {
158
+ repairedSqlWriter.write(
159
+ line.replace(badWindows1252CharacterRegex, (match) =>
160
+ windows1252.decode(match, { mode: "fatal" }),
161
+ ) + "\n",
162
+ )
163
+ }
164
+ repairedSqlWriter.end()
165
+ await fs.move(repairedSqlFilePath, sqlFilePath, { overwrite: true })
166
+ }
167
+
168
+ if (options.all || options.import) {
169
+ if (!options.silent) {
170
+ console.log(`Importing ${dataset.title}: ${sqlFilename}…`)
171
+ }
172
+ execSync(`psql -c "DROP DATABASE IF EXISTS ${dataset.database}"`, {
173
+ cwd: dataDir,
174
+ env: process.env,
175
+ encoding: "utf-8",
176
+ // stdio: ["ignore", "ignore", "pipe"],
177
+ })
178
+ execSync(
179
+ `psql -c "CREATE DATABASE ${dataset.database} WITH OWNER opendata"`,
180
+ {
181
+ cwd: dataDir,
182
+ env: process.env,
183
+ encoding: "utf-8",
184
+ // stdio: ["ignore", "ignore", "pipe"],
185
+ },
186
+ )
187
+ execSync(`psql -f ${sqlFilename} ${dataset.database}`, {
188
+ cwd: dataDir,
189
+ env: process.env,
190
+ encoding: "utf-8",
191
+ // stdio: ["ignore", "ignore", "pipe"],
192
+ })
193
+ }
194
+
195
+ if (options.schema) {
196
+ const definitionsDir = path.resolve("src", "raw_types")
197
+ assert(fs.statSync(definitionsDir).isDirectory())
198
+ if (!options.silent) {
199
+ console.log(
200
+ `Creating TypeScript definitions from schema of database ${dataset.database}…`,
201
+ )
202
+ }
203
+ const dbConnectionString = `postgres://${process.env.PGUSER}:${process.env.PGPASSWORD}@${process.env.PGHOST}:${process.env.PGPORT}/${dataset.database}`
204
+ const definitionFilePath = path.join(
205
+ definitionsDir,
206
+ `${dataset.database}.ts`,
207
+ )
208
+ execSync(
209
+ `npx schemats generate -c ${dbConnectionString} -s public -o ${definitionFilePath}`,
210
+ {
211
+ // cwd: dataDir,
212
+ env: process.env,
213
+ encoding: "utf-8",
214
+ // stdio: ["ignore", "ignore", "pipe"],
215
+ },
216
+ )
217
+ const definition = fs.readFileSync(definitionFilePath, { encoding: "utf8" })
218
+ const definitionRepaired = definition
219
+ .replace(/\r\n/g, "\n")
220
+ .replace(
221
+ /AUTO-GENERATED FILE @ \d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}/,
222
+ "AUTO-GENERATED FILE",
223
+ )
224
+ fs.writeFileSync(definitionFilePath, definitionRepaired)
225
+ }
226
+ }
227
+
228
+ async function retrieveOpenData(): Promise<void> {
229
+ const dataDir = options.dataDir
230
+ assert(dataDir, "Missing argument: data directory")
231
+
232
+ process.env = {
233
+ ...process.env,
234
+ PGHOST: process.env.PGHOST || config.db.host,
235
+ PGPORT: process.env.PGPORT || config.db.port,
236
+ PGUSER: process.env.PGUSER || config.db.user,
237
+ PGPASSWORD: process.env.PGPASSWORD || config.db.password
238
+ }
239
+ assert(process.env.PGHOST
240
+ && process.env.PGPORT
241
+ && process.env.PGUSER
242
+ && process.env.PGPASSWORD,
243
+ 'Missing database configuration: environment variables PGHOST, PGPORT, PGUSER and PGPASSWORD or TRICOTEUSES_SENAT_DB_* in .env file'
244
+ )
245
+
246
+ const choosenDatasets: Dataset[] = getChosenFromEnabledDatasets(options.categories)
247
+ // await Promise.all(choosenDatasets.map(dataset => retrieveDataset(dataDir, dataset)))
248
+ for (const dataset of choosenDatasets) {
249
+ await retrieveDataset(dataDir, dataset)
250
+ }
251
+ }
252
+
253
+ retrieveOpenData()
254
+ .then(() => process.exit(0))
255
+ .catch((error) => {
256
+ console.log(error)
257
+ process.exit(1)
258
+ })
@@ -0,0 +1 @@
1
+ export {};