@tricoteuses/senat 2.22.7 → 2.22.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (134)
  1. package/lib/src/parsers/texte.js +31 -1
  2. package/package.json +1 -1
  3. package/lib/config.d.ts +0 -21
  4. package/lib/config.js +0 -27
  5. package/lib/databases.d.ts +0 -2
  6. package/lib/databases.js +0 -26
  7. package/lib/datasets.d.ts +0 -34
  8. package/lib/datasets.js +0 -233
  9. package/lib/git.d.ts +0 -26
  10. package/lib/git.js +0 -167
  11. package/lib/index.d.ts +0 -13
  12. package/lib/index.js +0 -1
  13. package/lib/loaders.d.ts +0 -58
  14. package/lib/loaders.js +0 -286
  15. package/lib/model/agenda.d.ts +0 -6
  16. package/lib/model/agenda.js +0 -148
  17. package/lib/model/ameli.d.ts +0 -51
  18. package/lib/model/ameli.js +0 -147
  19. package/lib/model/commission.d.ts +0 -18
  20. package/lib/model/commission.js +0 -269
  21. package/lib/model/debats.d.ts +0 -67
  22. package/lib/model/debats.js +0 -95
  23. package/lib/model/documents.d.ts +0 -12
  24. package/lib/model/documents.js +0 -138
  25. package/lib/model/dosleg.d.ts +0 -7
  26. package/lib/model/dosleg.js +0 -326
  27. package/lib/model/index.d.ts +0 -7
  28. package/lib/model/index.js +0 -7
  29. package/lib/model/questions.d.ts +0 -45
  30. package/lib/model/questions.js +0 -89
  31. package/lib/model/scrutins.d.ts +0 -13
  32. package/lib/model/scrutins.js +0 -114
  33. package/lib/model/seance.d.ts +0 -3
  34. package/lib/model/seance.js +0 -267
  35. package/lib/model/sens.d.ts +0 -146
  36. package/lib/model/sens.js +0 -454
  37. package/lib/model/texte.d.ts +0 -7
  38. package/lib/model/texte.js +0 -228
  39. package/lib/model/util.d.ts +0 -9
  40. package/lib/model/util.js +0 -38
  41. package/lib/parsers/texte.d.ts +0 -7
  42. package/lib/parsers/texte.js +0 -228
  43. package/lib/raw_types/ameli.d.ts +0 -914
  44. package/lib/raw_types/ameli.js +0 -5
  45. package/lib/raw_types/debats.d.ts +0 -207
  46. package/lib/raw_types/debats.js +0 -5
  47. package/lib/raw_types/dosleg.d.ts +0 -1619
  48. package/lib/raw_types/dosleg.js +0 -5
  49. package/lib/raw_types/questions.d.ts +0 -423
  50. package/lib/raw_types/questions.js +0 -5
  51. package/lib/raw_types/senat.d.ts +0 -11372
  52. package/lib/raw_types/senat.js +0 -5
  53. package/lib/raw_types/sens.d.ts +0 -8248
  54. package/lib/raw_types/sens.js +0 -5
  55. package/lib/raw_types_schemats/ameli.d.ts +0 -539
  56. package/lib/raw_types_schemats/ameli.js +0 -2
  57. package/lib/raw_types_schemats/debats.d.ts +0 -127
  58. package/lib/raw_types_schemats/debats.js +0 -2
  59. package/lib/raw_types_schemats/dosleg.d.ts +0 -977
  60. package/lib/raw_types_schemats/dosleg.js +0 -2
  61. package/lib/raw_types_schemats/questions.d.ts +0 -237
  62. package/lib/raw_types_schemats/questions.js +0 -2
  63. package/lib/raw_types_schemats/sens.d.ts +0 -6915
  64. package/lib/raw_types_schemats/sens.js +0 -2
  65. package/lib/scripts/convert_data.d.ts +0 -1
  66. package/lib/scripts/convert_data.js +0 -354
  67. package/lib/scripts/data-download.d.ts +0 -1
  68. package/lib/scripts/data-download.js +0 -12
  69. package/lib/scripts/datautil.d.ts +0 -8
  70. package/lib/scripts/datautil.js +0 -34
  71. package/lib/scripts/parse_textes.d.ts +0 -1
  72. package/lib/scripts/parse_textes.js +0 -44
  73. package/lib/scripts/retrieve_agenda.d.ts +0 -1
  74. package/lib/scripts/retrieve_agenda.js +0 -132
  75. package/lib/scripts/retrieve_cr_commission.d.ts +0 -1
  76. package/lib/scripts/retrieve_cr_commission.js +0 -364
  77. package/lib/scripts/retrieve_cr_seance.d.ts +0 -6
  78. package/lib/scripts/retrieve_cr_seance.js +0 -347
  79. package/lib/scripts/retrieve_documents.d.ts +0 -3
  80. package/lib/scripts/retrieve_documents.js +0 -219
  81. package/lib/scripts/retrieve_open_data.d.ts +0 -1
  82. package/lib/scripts/retrieve_open_data.js +0 -316
  83. package/lib/scripts/retrieve_senateurs_photos.d.ts +0 -1
  84. package/lib/scripts/retrieve_senateurs_photos.js +0 -147
  85. package/lib/scripts/retrieve_videos.d.ts +0 -1
  86. package/lib/scripts/retrieve_videos.js +0 -461
  87. package/lib/scripts/shared/cli_helpers.d.ts +0 -95
  88. package/lib/scripts/shared/cli_helpers.js +0 -91
  89. package/lib/scripts/shared/util.d.ts +0 -4
  90. package/lib/scripts/shared/util.js +0 -35
  91. package/lib/scripts/test_iter_load.d.ts +0 -1
  92. package/lib/scripts/test_iter_load.js +0 -12
  93. package/lib/src/utils/nvs-timecode.d.ts +0 -17
  94. package/lib/src/utils/nvs-timecode.js +0 -79
  95. package/lib/src/utils/weights_scoring_config.d.ts +0 -2
  96. package/lib/src/utils/weights_scoring_config.js +0 -15
  97. package/lib/strings.d.ts +0 -1
  98. package/lib/strings.js +0 -18
  99. package/lib/types/agenda.d.ts +0 -44
  100. package/lib/types/agenda.js +0 -1
  101. package/lib/types/ameli.d.ts +0 -5
  102. package/lib/types/ameli.js +0 -1
  103. package/lib/types/compte_rendu.d.ts +0 -83
  104. package/lib/types/compte_rendu.js +0 -1
  105. package/lib/types/debats.d.ts +0 -2
  106. package/lib/types/debats.js +0 -1
  107. package/lib/types/dosleg.d.ts +0 -70
  108. package/lib/types/dosleg.js +0 -1
  109. package/lib/types/questions.d.ts +0 -2
  110. package/lib/types/questions.js +0 -1
  111. package/lib/types/sens.d.ts +0 -10
  112. package/lib/types/sens.js +0 -1
  113. package/lib/types/sessions.d.ts +0 -5
  114. package/lib/types/sessions.js +0 -84
  115. package/lib/types/texte.d.ts +0 -74
  116. package/lib/types/texte.js +0 -16
  117. package/lib/utils/cr_spliting.d.ts +0 -28
  118. package/lib/utils/cr_spliting.js +0 -265
  119. package/lib/utils/date.d.ts +0 -10
  120. package/lib/utils/date.js +0 -100
  121. package/lib/utils/nvs-timecode.d.ts +0 -7
  122. package/lib/utils/nvs-timecode.js +0 -79
  123. package/lib/utils/reunion_grouping.d.ts +0 -11
  124. package/lib/utils/reunion_grouping.js +0 -337
  125. package/lib/utils/reunion_odj_building.d.ts +0 -5
  126. package/lib/utils/reunion_odj_building.js +0 -154
  127. package/lib/utils/reunion_parsing.d.ts +0 -23
  128. package/lib/utils/reunion_parsing.js +0 -209
  129. package/lib/utils/scoring.d.ts +0 -14
  130. package/lib/utils/scoring.js +0 -147
  131. package/lib/utils/string_cleaning.d.ts +0 -7
  132. package/lib/utils/string_cleaning.js +0 -57
  133. package/lib/validators/config.d.ts +0 -9
  134. package/lib/validators/config.js +0 -10
@@ -1,316 +0,0 @@
1
- import assert from "assert";
2
- import { execSync } from "child_process";
3
- import commandLineArgs from "command-line-args";
4
- import fs from "fs-extra";
5
- import path from "path";
6
- import StreamZip from "node-stream-zip";
7
- import readline from "readline";
8
- import * as windows1252 from "windows-1252";
9
- import { pipeline } from "stream";
10
- import { promisify } from "util";
11
- import config from "../config";
12
- import { getChosenDatasets, getEnabledDatasets } from "../datasets";
13
- import { commonOptions } from "./shared/cli_helpers";
14
- const badWindows1252CharacterRegex = /[\u0080-\u009f]/g;
15
- const optionsDefinitions = [
16
- ...commonOptions,
17
- {
18
- alias: "a",
19
- help: "all options: fetch, unzip, repair-encoding, import",
20
- name: "all",
21
- type: Boolean,
22
- },
23
- {
24
- alias: "c",
25
- help: "create TypeScript interfaces from databases schemas into src/raw_types_* directories",
26
- name: "schema",
27
- type: Boolean,
28
- },
29
- {
30
- alias: "e",
31
- help: "repair Windows CP 1252 encoding of SQL dumps",
32
- name: "repairEncoding",
33
- type: Boolean,
34
- },
35
- {
36
- alias: "f",
37
- help: "fetch datasets instead of retrieving them from files",
38
- name: "fetch",
39
- type: Boolean,
40
- },
41
- {
42
- alias: "i",
43
- help: "import SQL dumps into a freshly (re-)created database",
44
- name: "import",
45
- type: Boolean,
46
- },
47
- {
48
- alias: "S",
49
- help: "sudo psql commands with given user",
50
- name: "sudo",
51
- type: String,
52
- },
53
- {
54
- alias: "z",
55
- help: "unzip SQL files",
56
- name: "unzip",
57
- type: Boolean,
58
- },
59
- ];
60
- const options = commandLineArgs(optionsDefinitions);
61
- const streamPipeline = promisify(pipeline);
62
- async function downloadFile(url, dest) {
63
- const response = await fetch(url);
64
- if (!response.ok) {
65
- throw new Error(`Download failed ${response.status} ${response.statusText} for ${url}`);
66
- }
67
- await streamPipeline(response.body, fs.createWriteStream(dest));
68
- }
69
- /**
70
- * Copy a dataset database to the main Senat database (overwriting its contents).
71
- */
72
- async function copyToSenat(dataset, dataDir, options) {
73
- if (!options["silent"]) {
74
- console.log(`Copying ${dataset.database} to Senat database...`);
75
- }
76
- const sqlFilePath = path.join(dataDir, `${dataset.database}.sql`);
77
- const schemaDumpFile = path.join(dataDir, `${dataset.database}_schema_dump.sql`);
78
- // Write the header and then stream the rest of the SQL file
79
- const schemaSqlWriter = fs.createWriteStream(schemaDumpFile, { encoding: "utf8" });
80
- // Add CREATE SCHEMA statement at the top
81
- schemaSqlWriter.write(`DROP SCHEMA IF EXISTS ${dataset.database} CASCADE;\n`);
82
- schemaSqlWriter.write(`CREATE SCHEMA IF NOT EXISTS ${dataset.database};\n`);
83
- schemaSqlWriter.write(`GRANT USAGE ON SCHEMA ${dataset.database} TO ${config.db.user};\n`);
84
- schemaSqlWriter.write(`GRANT SELECT ON ALL TABLES IN SCHEMA ${dataset.database} TO ${config.db.user};\n`);
85
- schemaSqlWriter.write(`ALTER DEFAULT PRIVILEGES IN SCHEMA ${dataset.database} GRANT SELECT ON TABLES TO ${config.db.user};\n`);
86
- const lineReader = readline.createInterface({
87
- input: fs.createReadStream(sqlFilePath, { encoding: "utf8" }),
88
- crlfDelay: Infinity,
89
- });
90
- for await (const line of lineReader) {
91
- let newLine = line;
92
- // Replace 'public' schema outside single-quoted strings
93
- function replacePublicOutsideStrings(line, schema) {
94
- const parts = line.split(/(')/);
95
- let inString = false;
96
- for (let i = 0; i < parts.length; i++) {
97
- if (parts[i] === "'") {
98
- inString = !inString;
99
- }
100
- else if (!inString) {
101
- // Only replace outside of strings, including before comma
102
- parts[i] = parts[i].replace(/\bpublic\b(?=(\s*\.|\s*[,;]|\s|$))/g, schema);
103
- }
104
- }
105
- return parts.join("");
106
- }
107
- newLine = replacePublicOutsideStrings(line, dataset.database);
108
- // Replace SET client_encoding to UTF8
109
- newLine = newLine.replace(/SET client_encoding = 'LATIN1';/i, "SET client_encoding = 'UTF8';");
110
- schemaSqlWriter.write(newLine + "\n");
111
- }
112
- schemaSqlWriter.end();
113
- await new Promise((resolve, reject) => {
114
- schemaSqlWriter.on("finish", () => {
115
- try {
116
- execSync(`${options["sudo"] ? `sudo -u ${options["sudo"]} ` : ""}psql --quiet -d senat -f ${schemaDumpFile}`, {
117
- env: process.env,
118
- encoding: "utf-8",
119
- stdio: ["ignore", "pipe", "pipe"],
120
- });
121
- }
122
- catch (error) {
123
- if (!options["silent"]) {
124
- console.error(`Failed to import ${dataset.database} schema:`);
125
- if (error.stderr) {
126
- console.error(error.stderr);
127
- }
128
- if (error.stdout) {
129
- console.error(error.stdout);
130
- }
131
- }
132
- }
133
- resolve();
134
- });
135
- schemaSqlWriter.on("error", reject);
136
- });
137
- }
138
- async function retrieveDataset(dataDir, dataset) {
139
- const zipFilename = dataset.url.substring(dataset.url.lastIndexOf("/") + 1);
140
- const zipFilePath = path.join(dataDir, zipFilename);
141
- if (options["all"] || options["fetch"]) {
142
- // Fetch & save ZIP file.
143
- if (!options["silent"]) {
144
- console.log(`Loading ${dataset.title}: ${zipFilename}…`);
145
- }
146
- // Fetch fails with OpenSSL error: dh key too small.
147
- // (so does "curl").
148
- // const response = await fetch(dataset.url)
149
- // if (!response.ok) {
150
- // console.error(response.status, response.statusText)
151
- // console.error(await response.text())
152
- // throw new Error(`Fetch failed: ${dataset.url}`)
153
- // }
154
- // await pipeline(response.body!, fs.createWriteStream(zipFilePath))
155
- fs.removeSync(zipFilePath);
156
- await downloadFile(dataset.url, zipFilePath);
157
- }
158
- const sqlFilename = `${dataset.database}.sql`;
159
- const sqlFilePath = path.join(dataDir, sqlFilename);
160
- if (options["all"] || options["unzip"]) {
161
- if (!options["silent"]) {
162
- console.log(`Unzipping ${dataset.title}: ${zipFilename}…`);
163
- }
164
- fs.removeSync(sqlFilePath);
165
- const zip = new StreamZip({
166
- file: zipFilePath,
167
- storeEntries: true,
168
- });
169
- await new Promise((resolve, reject) => {
170
- zip.on("ready", () => {
171
- zip.extract(null, dataDir, (err, _count) => {
172
- zip.close();
173
- if (err) {
174
- reject(err);
175
- }
176
- else {
177
- resolve(null);
178
- }
179
- });
180
- });
181
- });
182
- if (dataset.repairZip !== undefined) {
183
- if (!options["silent"]) {
184
- console.log(`Repairing Zip path ${dataset.title}: ${sqlFilename}…`);
185
- }
186
- dataset.repairZip(dataset, dataDir);
187
- }
188
- }
189
- if ((options["all"] || options["repairEncoding"]) && dataset.repairEncoding) {
190
- if (!options["silent"]) {
191
- console.log(`Repairing Windows CP1252 encoding in ${dataset.title}: ${sqlFilename}…`);
192
- }
193
- const repairedSqlFilePath = sqlFilePath + ".repaired";
194
- const repairedSqlWriter = fs.createWriteStream(repairedSqlFilePath, {
195
- encoding: "utf8",
196
- });
197
- // Read the file as latin1 (ISO-8859-1/CP1252) and write as UTF-8
198
- const lineReader = readline.createInterface({
199
- input: fs.createReadStream(sqlFilePath, { encoding: "latin1" }),
200
- crlfDelay: Infinity,
201
- });
202
- for await (const line of lineReader) {
203
- // Optionally repair Windows-1252 control characters
204
- let repairedLine = line.replace(badWindows1252CharacterRegex, (match) => windows1252.decode(match, { mode: "fatal" }));
205
- repairedSqlWriter.write(repairedLine + "\n");
206
- }
207
- repairedSqlWriter.end();
208
- await fs.move(repairedSqlFilePath, sqlFilePath, { overwrite: true });
209
- }
210
- if (options["all"] || options["import"] || options["schema"]) {
211
- if (!options["silent"]) {
212
- console.log(`Importing ${dataset.title}: ${sqlFilename}…`);
213
- }
214
- await copyToSenat(dataset, dataDir, options);
215
- // Create indexes programmatically after import
216
- if (dataset.indexes) {
217
- for (const [table, indexes] of Object.entries(dataset.indexes)) {
218
- for (const index of indexes) {
219
- const indexName = index.name;
220
- const columns = index.columns.join(", ");
221
- const schema = dataset.database;
222
- const sql = `CREATE INDEX IF NOT EXISTS ${indexName} ON ${schema}.${table} (${columns});`;
223
- try {
224
- execSync(`${options["sudo"] ? `sudo -u ${options["sudo"]} ` : ""}psql --quiet -d senat -c "${sql}"`, {
225
- env: process.env,
226
- encoding: "utf-8",
227
- stdio: ["ignore", "ignore", "pipe"],
228
- });
229
- if (!options["silent"]) {
230
- console.log(`Created index: ${indexName} on ${schema}.${table} (${columns})`);
231
- }
232
- }
233
- catch (err) {
234
- console.error(`Failed to create index ${indexName} on ${schema}.${table}:`, err);
235
- }
236
- }
237
- }
238
- }
239
- }
240
- if (options["schema"]) {
241
- let definitionsDir = path.resolve("src", "raw_types_schemats");
242
- assert(fs.statSync(definitionsDir).isDirectory());
243
- if (!options["silent"]) {
244
- console.log(`Creating TypeScript definitions from schema '${dataset.database}' in database 'senat'…`);
245
- }
246
- const dbConnectionString = `postgres://${process.env["PGUSER"]}:${process.env["PGPASSWORD"]}@${process.env["PGHOST"]}:${process.env["PGPORT"]}/senat`;
247
- let definitionFilePath = path.join(definitionsDir, `${dataset.database}.ts`);
248
- execSync(`npx schemats generate -c ${dbConnectionString} -s ${dataset.database} -o ${definitionFilePath}`, {
249
- // cwd: dataDir,
250
- env: process.env,
251
- encoding: "utf-8",
252
- // stdio: ["ignore", "ignore", "pipe"],
253
- });
254
- const definition = fs.readFileSync(definitionFilePath, { encoding: "utf8" });
255
- const definitionRepaired = definition
256
- .replace(/\r\n/g, "\n")
257
- .replace(/AUTO-GENERATED FILE @ \d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}/, "AUTO-GENERATED FILE");
258
- fs.writeFileSync(definitionFilePath, definitionRepaired);
259
- definitionsDir = path.resolve("src", "raw_types");
260
- definitionFilePath = path.join(definitionsDir, `${dataset.database}.ts`);
261
- execSync(`npx kysely-codegen --url '${dbConnectionString}' --default-schema ${dataset.database} --include-pattern '${dataset.database}.*' --out-file ${definitionFilePath}`, {
262
- env: process.env,
263
- encoding: "utf-8",
264
- // stdio: ["ignore", "ignore", "pipe"],
265
- });
266
- }
267
- }
268
- async function retrieveOpenData() {
269
- const dataDir = options["dataDir"];
270
- assert(dataDir, "Missing argument: data directory");
271
- process.env = {
272
- ...process.env,
273
- PGHOST: process.env["PGHOST"] || config.db.host,
274
- PGPORT: process.env["PGPORT"] || String(config.db.port),
275
- PGDATABASE: process.env["PGDATABASE"] || config.db.name,
276
- PGUSER: process.env["PGUSER"] || config.db.user,
277
- PGPASSWORD: process.env["PGPASSWORD"] || config.db.password,
278
- };
279
- assert(process.env["PGHOST"] && process.env["PGPORT"] && process.env["PGUSER"] && process.env["PGPASSWORD"], "Missing database configuration: environment variables PGHOST, PGPORT, PGUSER and PGPASSWORD or TRICOTEUSES_SENAT_DB_* in .env file");
280
- console.time("data extraction time");
281
- // Create role 'opendata' if it does not exist
282
- execSync(`${options["sudo"] ? `sudo -u ${options["sudo"]} ` : ""}psql --quiet -c "CREATE ROLE opendata" || true`, {
283
- cwd: dataDir,
284
- env: process.env,
285
- encoding: "utf-8",
286
- });
287
- execSync(`${options["sudo"] ? `sudo -u ${options["sudo"]} ` : ""}psql --quiet -c "CREATE DATABASE senat WITH OWNER opendata" || true`, {
288
- cwd: dataDir,
289
- env: process.env,
290
- encoding: "utf-8",
291
- });
292
- const enabledDatasets = getEnabledDatasets(options["categories"]);
293
- const chosenDatasets = getChosenDatasets(enabledDatasets);
294
- for (const dataset of chosenDatasets) {
295
- await retrieveDataset(dataDir, dataset);
296
- }
297
- if (options["schema"]) {
298
- const dbConnectionString = `postgres://${process.env["PGUSER"]}:${process.env["PGPASSWORD"]}@${process.env["PGHOST"]}:${process.env["PGPORT"]}/senat`;
299
- const definitionsDir = path.resolve("src", "raw_types");
300
- const definitionFilePath = path.join(definitionsDir, `senat.ts`);
301
- execSync(`npx kysely-codegen --url '${dbConnectionString}' --out-file ${definitionFilePath}`, {
302
- env: process.env,
303
- encoding: "utf-8",
304
- // stdio: ["ignore", "ignore", "pipe"],
305
- });
306
- }
307
- if (!options["silent"]) {
308
- console.timeEnd("data extraction time");
309
- }
310
- }
311
- retrieveOpenData()
312
- .then(() => process.exit(0))
313
- .catch((error) => {
314
- console.log(error);
315
- process.exit(1);
316
- });
@@ -1 +0,0 @@
1
- export {};
@@ -1,147 +0,0 @@
1
- import assert from "assert";
2
- import { execSync } from "child_process";
3
- import commandLineArgs from "command-line-args";
4
- import fs from "fs-extra";
5
- // import fetch from "node-fetch"
6
- import path from "path";
7
- // import stream from "stream"
8
- // import util from "util"
9
- import { findActif as findActifSenateurs } from "../model/sens";
10
- import { slugify } from "../strings";
11
- import { commonOptions } from "./shared/cli_helpers";
12
- const optionsDefinitions = [
13
- ...commonOptions,
14
- {
15
- alias: "f",
16
- help: "fetch sénateurs' pictures instead of retrieving them from files",
17
- name: "fetch",
18
- type: Boolean,
19
- },
20
- ];
21
- const options = commandLineArgs(optionsDefinitions);
22
- // const pipeline = util.promisify(stream.pipeline)
23
- async function retrievePhotosSenateurs() {
24
- const dataDir = options["dataDir"];
25
- assert(dataDir, "Missing argument: data directory");
26
- const photosDir = path.join(dataDir, "photos_senateurs");
27
- const missingPhotoFilePath = path.resolve(__dirname, "images", "transparent_155x225.jpg");
28
- const sens = await Array.fromAsync(findActifSenateurs());
29
- // Download photos.
30
- fs.ensureDirSync(photosDir);
31
- if (options["fetch"]) {
32
- for (const sen of sens) {
33
- const photoStem = `${slugify(sen.sennomuse, "_")}_${slugify(sen.senprenomuse, "_")}${slugify(sen.senmat, "_")}`;
34
- const photoFilename = photoStem + ".jpg";
35
- const photoFilePath = path.join(photosDir, photoFilename);
36
- const photoTempFilename = photoStem + "_temp.jpg";
37
- const photoTempFilePath = path.join(photosDir, photoTempFilename);
38
- const urlPhoto = `https://www.senat.fr/senimg/${photoFilename}`;
39
- if (!options["silent"]) {
40
- console.log(`Loading photo ${urlPhoto} for ${sen.senprenomuse} ${sen.sennomuse}…`);
41
- }
42
- // Fetch fails with OpenSSL error: dh key too small.
43
- // (so does "curl").
44
- // for (let retries = 0; retries < 3; retries++) {
45
- // const response = await fetch(urlPhoto)
46
- // if (response.ok) {
47
- // await pipeline(response.body, fs.createWriteStream(photoTempFilePath))
48
- // fs.renameSync(photoTempFilePath, photoFilePath)
49
- // break
50
- // }
51
- // if (retries >= 2) {
52
- // console.warn(`Fetch failed: ${urlPhoto} (${sen.senprenomuse} ${sen.sennomuse})`)
53
- // console.warn(response.status, response.statusText)
54
- // console.warn(await response.text())
55
- // if (fs.existsSync(photoFilePath)) {
56
- // console.warn(" => Reusing existing image")
57
- // } else {
58
- // console.warn(" => Using blank image")
59
- // fs.copyFileSync(missingPhotoFilePath, photoFilePath)
60
- // }
61
- // break
62
- // }
63
- // }
64
- try {
65
- execSync(`wget --quiet -O ${photoTempFilename} ${urlPhoto}`, {
66
- cwd: photosDir,
67
- env: process.env,
68
- encoding: "utf-8",
69
- // stdio: ["ignore", "ignore", "pipe"],
70
- });
71
- fs.renameSync(photoTempFilePath, photoFilePath);
72
- }
73
- catch (error) {
74
- if (typeof error === "object" && error && "status" in error && error.status === 8) {
75
- console.error(`Unable to load photo for ${sen.senprenomuse} ${sen.sennomuse}`);
76
- continue;
77
- }
78
- throw error;
79
- }
80
- }
81
- }
82
- // Resize photos to 155x225, because some haven't exactly this size.
83
- for (const sen of sens) {
84
- const photoStem = `${slugify(sen.sennomuse, "_")}_${slugify(sen.senprenomuse, "_")}${slugify(sen.senmat, "_")}`;
85
- const photoFilename = photoStem + ".jpg";
86
- const photoFilePath = path.join(photosDir, photoFilename);
87
- if (fs.existsSync(photoFilePath)) {
88
- if (!options["silent"]) {
89
- console.log(`Resizing photo ${photoStem} for ${sen.senprenomuse} ${sen.sennomuse}…`);
90
- }
91
- execSync(`gm convert -resize 155x225! ${photoStem}.jpg ${photoStem}_155x225.jpg`, {
92
- cwd: photosDir,
93
- });
94
- }
95
- else {
96
- if (!options["silent"]) {
97
- console.warn(`Missing photo for ${sen.senprenomuse} ${sen.sennomuse}: using blank image`);
98
- }
99
- fs.copyFileSync(missingPhotoFilePath, path.join(photosDir, `${photoStem}_155x225.jpg`));
100
- }
101
- }
102
- // Create a mosaic of photos.
103
- if (!options["silent"]) {
104
- console.log("Creating mosaic of photos…");
105
- }
106
- const photoBySenmat = {};
107
- const rowsFilenames = [];
108
- for (let senIndex = 0, rowIndex = 0; senIndex < sens.length; senIndex += 25, rowIndex++) {
109
- const row = sens.slice(senIndex, senIndex + 25);
110
- const photosFilenames = [];
111
- for (const [columnIndex, sen] of row.entries()) {
112
- const photoStem = `${slugify(sen.sennomuse, "_")}_${slugify(sen.senprenomuse, "_")}${slugify(sen.senmat, "_")}`;
113
- const photoFilename = `${photoStem}_155x225.jpg`;
114
- photosFilenames.push(photoFilename);
115
- photoBySenmat[sen.senmat] = {
116
- chemin: `photos_senateurs/${photoFilename}`,
117
- cheminMosaique: "photos_senateurs/senateurs.jpg",
118
- hauteur: 225,
119
- largeur: 155,
120
- xMosaique: columnIndex * 155,
121
- yMosaique: rowIndex * 225,
122
- };
123
- }
124
- const rowFilename = `row-${rowIndex}.jpg`;
125
- execSync(`gm convert ${photosFilenames.join(" ")} +append ${rowFilename}`, {
126
- cwd: photosDir,
127
- });
128
- rowsFilenames.push(rowFilename);
129
- }
130
- execSync(`gm convert ${rowsFilenames.join(" ")} -append senateurs.jpg`, {
131
- cwd: photosDir,
132
- });
133
- for (const rowFilename of rowsFilenames) {
134
- fs.unlinkSync(path.join(photosDir, rowFilename));
135
- }
136
- if (!options["silent"]) {
137
- console.log("Creating JSON file containing informations on all pictures…");
138
- }
139
- const jsonFilePath = path.join(photosDir, "senateurs.json");
140
- fs.writeFileSync(jsonFilePath, JSON.stringify(photoBySenmat, null, 2));
141
- }
142
- retrievePhotosSenateurs()
143
- .then(() => process.exit(0))
144
- .catch((error) => {
145
- console.log(error);
146
- process.exit(1);
147
- });
@@ -1 +0,0 @@
1
- export {};