@tricoteuses/senat 2.9.0 → 2.9.1

This diff shows the changes between publicly available package versions as they were released to one of the supported registries. It is provided for informational purposes only.
Files changed (113)
  1. package/README.md +1 -0
  2. package/package.json +1 -1
  3. package/lib/aggregates.d.ts +0 -52
  4. package/lib/aggregates.js +0 -949
  5. package/lib/aggregates.mjs +0 -726
  6. package/lib/aggregates.ts +0 -852
  7. package/lib/config.mjs +0 -16
  8. package/lib/config.ts +0 -26
  9. package/lib/databases.mjs +0 -55
  10. package/lib/databases.ts +0 -68
  11. package/lib/datasets.mjs +0 -78
  12. package/lib/datasets.ts +0 -118
  13. package/lib/fields.d.ts +0 -10
  14. package/lib/fields.js +0 -68
  15. package/lib/fields.mjs +0 -22
  16. package/lib/fields.ts +0 -29
  17. package/lib/index.mjs +0 -7
  18. package/lib/index.ts +0 -64
  19. package/lib/inserters.d.ts +0 -98
  20. package/lib/inserters.js +0 -500
  21. package/lib/inserters.mjs +0 -360
  22. package/lib/inserters.ts +0 -521
  23. package/lib/legislatures.json +0 -38
  24. package/lib/loaders.mjs +0 -97
  25. package/lib/loaders.ts +0 -173
  26. package/lib/model/ameli.mjs +0 -57
  27. package/lib/model/ameli.ts +0 -86
  28. package/lib/model/debats.mjs +0 -43
  29. package/lib/model/debats.ts +0 -68
  30. package/lib/model/dosleg.mjs +0 -163
  31. package/lib/model/dosleg.ts +0 -204
  32. package/lib/model/index.mjs +0 -4
  33. package/lib/model/index.ts +0 -13
  34. package/lib/model/questions.mjs +0 -76
  35. package/lib/model/questions.ts +0 -102
  36. package/lib/model/sens.mjs +0 -339
  37. package/lib/model/sens.ts +0 -432
  38. package/lib/model/texte.mjs +0 -156
  39. package/lib/model/texte.ts +0 -174
  40. package/lib/raw_types_kysely/ameli.d.ts +0 -915
  41. package/lib/raw_types_kysely/ameli.js +0 -7
  42. package/lib/raw_types_kysely/ameli.mjs +0 -5
  43. package/lib/raw_types_kysely/ameli.ts +0 -951
  44. package/lib/raw_types_kysely/debats.d.ts +0 -207
  45. package/lib/raw_types_kysely/debats.js +0 -7
  46. package/lib/raw_types_kysely/debats.mjs +0 -5
  47. package/lib/raw_types_kysely/debats.ts +0 -222
  48. package/lib/raw_types_kysely/dosleg.d.ts +0 -3532
  49. package/lib/raw_types_kysely/dosleg.js +0 -7
  50. package/lib/raw_types_kysely/dosleg.mjs +0 -5
  51. package/lib/raw_types_kysely/dosleg.ts +0 -3621
  52. package/lib/raw_types_kysely/questions.d.ts +0 -414
  53. package/lib/raw_types_kysely/questions.js +0 -7
  54. package/lib/raw_types_kysely/questions.mjs +0 -5
  55. package/lib/raw_types_kysely/questions.ts +0 -426
  56. package/lib/raw_types_kysely/sens.d.ts +0 -4394
  57. package/lib/raw_types_kysely/sens.js +0 -7
  58. package/lib/raw_types_kysely/sens.mjs +0 -5
  59. package/lib/raw_types_kysely/sens.ts +0 -4499
  60. package/lib/raw_types_schemats/ameli.mjs +0 -2
  61. package/lib/raw_types_schemats/ameli.ts +0 -601
  62. package/lib/raw_types_schemats/debats.mjs +0 -2
  63. package/lib/raw_types_schemats/debats.ts +0 -145
  64. package/lib/raw_types_schemats/dosleg.mjs +0 -2
  65. package/lib/raw_types_schemats/dosleg.ts +0 -2193
  66. package/lib/raw_types_schemats/questions.mjs +0 -2
  67. package/lib/raw_types_schemats/questions.ts +0 -249
  68. package/lib/raw_types_schemats/sens.mjs +0 -2
  69. package/lib/raw_types_schemats/sens.ts +0 -2907
  70. package/lib/scripts/convert_data.mjs +0 -95
  71. package/lib/scripts/convert_data.ts +0 -119
  72. package/lib/scripts/datautil.mjs +0 -16
  73. package/lib/scripts/datautil.ts +0 -19
  74. package/lib/scripts/images/transparent_150x192.jpg +0 -0
  75. package/lib/scripts/images/transparent_155x225.jpg +0 -0
  76. package/lib/scripts/parse_textes.mjs +0 -38
  77. package/lib/scripts/parse_textes.ts +0 -52
  78. package/lib/scripts/retrieve_documents.mjs +0 -243
  79. package/lib/scripts/retrieve_documents.ts +0 -279
  80. package/lib/scripts/retrieve_open_data.mjs +0 -214
  81. package/lib/scripts/retrieve_open_data.ts +0 -261
  82. package/lib/scripts/retrieve_senateurs_photos.mjs +0 -147
  83. package/lib/scripts/retrieve_senateurs_photos.ts +0 -177
  84. package/lib/scripts/retrieve_textes.d.ts +0 -1
  85. package/lib/scripts/retrieve_textes.mjs +0 -165
  86. package/lib/scripts/retrieve_textes.ts +0 -79
  87. package/lib/scripts/shared/cli_helpers.ts +0 -36
  88. package/lib/scripts/shared/util.ts +0 -33
  89. package/lib/strings.mjs +0 -18
  90. package/lib/strings.ts +0 -26
  91. package/lib/types/ameli.mjs +0 -13
  92. package/lib/types/ameli.ts +0 -21
  93. package/lib/types/debats.mjs +0 -2
  94. package/lib/types/debats.ts +0 -6
  95. package/lib/types/dosleg.mjs +0 -151
  96. package/lib/types/dosleg.ts +0 -284
  97. package/lib/types/questions.mjs +0 -1
  98. package/lib/types/questions.ts +0 -3
  99. package/lib/types/sens.mjs +0 -1
  100. package/lib/types/sens.ts +0 -12
  101. package/lib/types/sessions.mjs +0 -43
  102. package/lib/types/sessions.ts +0 -42
  103. package/lib/types/texte.mjs +0 -16
  104. package/lib/types/texte.ts +0 -66
  105. package/lib/typings/windows-1252.d.js +0 -2
  106. package/lib/typings/windows-1252.d.mjs +0 -2
  107. package/lib/typings/windows-1252.d.ts +0 -11
  108. package/lib/validators/config.mjs +0 -54
  109. package/lib/validators/config.ts +0 -79
  110. package/lib/validators/senat.d.ts +0 -0
  111. package/lib/validators/senat.js +0 -24
  112. package/lib/validators/senat.mjs +0 -24
  113. package/lib/validators/senat.ts +0 -26
package/lib/scripts/retrieve_open_data.ts
@@ -1,261 +0,0 @@
- import assert from "assert"
- import { execSync } from "child_process"
- import commandLineArgs from "command-line-args"
- import fs from "fs-extra"
- // import fetch from "node-fetch"
- import path from "path"
- // import stream from "stream"
- import StreamZip from "node-stream-zip"
- import readline from "readline"
- // import util from "util"
- import windows1252 from "windows-1252"
-
- import config from "../config"
- import {
-   Dataset,
-   getChosenFromEnabledDatasets,
- } from "../datasets"
- import { commonOptions } from "./shared/cli_helpers"
-
- const badWindows1252CharacterRegex = /[\u0080-\u009f]/g
- const optionsDefinitions = [
-   ...commonOptions,
-   {
-     alias: "a",
-     help: "all options: fetch, unzip, repair-encoding, import",
-     name: "all",
-     type: Boolean,
-   },
-   {
-     alias: "f",
-     help: "fetch datasets instead of retrieving them from files",
-     name: "fetch",
-     type: Boolean,
-   },
-   {
-     alias: "z",
-     help: "unzip SQL files",
-     name: "unzip",
-     type: Boolean,
-   },
-   {
-     alias: "e",
-     help: "repair Windows CP 1252 encoding of SQL dumps",
-     name: "repairEncoding",
-     type: Boolean,
-   },
-   {
-     alias: "i",
-     help: "import SQL dumps into a freshly (re-)created database",
-     name: "import",
-     type: Boolean,
-   },
-   {
-     alias: "d",
-     help: "repair database (update schema and types)",
-     name: "repairDatabase",
-     type: Boolean,
-   },
-   {
-     alias: "c",
-     help:
-       "create TypeScript interfaces from databases schemas into src/raw_types_* directories",
-     name: "schema",
-     type: Boolean,
-   },
- ]
- const options = commandLineArgs(optionsDefinitions)
-
- // const pipeline = util.promisify(stream.pipeline)
-
- async function retrieveDataset(
-   dataDir: string,
-   dataset: Dataset,
- ): Promise<void> {
-   const zipFilename = dataset.url.substring(dataset.url.lastIndexOf("/") + 1)
-   const zipFilePath = path.join(dataDir, zipFilename)
-   if (options.all || options.fetch) {
-     // Fetch & save ZIP file.
-     if (!options.silent) {
-       console.log(`Loading ${dataset.title}: ${zipFilename}…`)
-     }
-     // Fetch fails with OpenSSL error: dh key too small.
-     // (so does "curl").
-     // const response = await fetch(dataset.url)
-     // if (!response.ok) {
-     // console.error(response.status, response.statusText)
-     // console.error(await response.text())
-     // throw new Error(`Fetch failed: ${dataset.url}`)
-     // }
-     // await pipeline(response.body!, fs.createWriteStream(zipFilePath))
-     fs.removeSync(zipFilePath)
-     execSync(`wget --quiet ${dataset.url}`, {
-       cwd: dataDir,
-       env: process.env,
-       encoding: "utf-8",
-       // stdio: ["ignore", "ignore", "pipe"],
-     })
-   }
-
-   const sqlFilename = `${dataset.database}.sql`
-   const sqlFilePath = path.join(dataDir, sqlFilename)
-   if (options.all || options.unzip) {
-     if (!options.silent) {
-       console.log(`Unzipping ${dataset.title}: ${zipFilename}…`)
-     }
-     fs.removeSync(sqlFilePath)
-     const zip = new StreamZip({
-       file: zipFilePath,
-       storeEntries: true,
-     })
-     await new Promise((resolve, reject) => {
-       zip.on("ready", () => {
-         zip.extract(null, dataDir, (err?: any, _count?: number) => {
-           zip.close()
-           if (err) {
-             reject(err)
-           } else {
-             resolve(null)
-           }
-         })
-       })
-     })
-     if (dataset.repairZip !== undefined) {
-       if (!options.silent) {
-         console.log(`Repairing Zip path ${dataset.title}: ${sqlFilename}…`)
-       }
-       dataset.repairZip(dataset, dataDir)
-     }
-   }
-
-   if ((options.all || options.repairEncoding) && dataset.repairEncoding) {
-     if (!options.silent) {
-       console.log(
-         `Repairing Windows CP1252 encoding of ${dataset.title}: ${sqlFilename}…`,
-       )
-     }
-     const repairedSqlFilePath = sqlFilePath + ".repaired"
-     const repairedSqlWriter = fs.createWriteStream(repairedSqlFilePath, {
-       encoding: "utf8",
-     })
-     const lineReader = readline.createInterface({
-       input: fs.createReadStream(sqlFilePath, { encoding: "utf8" }),
-       crlfDelay: Infinity,
-     })
-     for await (const line of lineReader) {
-       repairedSqlWriter.write(
-         line.replace(badWindows1252CharacterRegex, (match) =>
-           windows1252.decode(match, { mode: "fatal" }),
-         ) + "\n",
-       )
-     }
-     repairedSqlWriter.end()
-     await fs.move(repairedSqlFilePath, sqlFilePath, { overwrite: true })
-   }
-
-   if (options.all || options.import) {
-     if (!options.silent) {
-       console.log(`Importing ${dataset.title}: ${sqlFilename}…`)
-     }
-     execSync(`psql -c "DROP DATABASE IF EXISTS ${dataset.database}"`, {
-       cwd: dataDir,
-       env: process.env,
-       encoding: "utf-8",
-       // stdio: ["ignore", "ignore", "pipe"],
-     })
-     execSync(
-       `psql -c "CREATE DATABASE ${dataset.database} WITH OWNER opendata"`,
-       {
-         cwd: dataDir,
-         env: process.env,
-         encoding: "utf-8",
-         // stdio: ["ignore", "ignore", "pipe"],
-       },
-     )
-     execSync(`psql -f ${sqlFilename} ${dataset.database}`, {
-       cwd: dataDir,
-       env: process.env,
-       encoding: "utf-8",
-       // stdio: ["ignore", "ignore", "pipe"],
-     })
-   }
-
-   if (options.schema) {
-     let definitionsDir = path.resolve("src", "raw_types_schemats")
-     assert(fs.statSync(definitionsDir).isDirectory())
-     if (!options.silent) {
-       console.log(
-         `Creating TypeScript definitions from schema of database ${dataset.database}…`,
-       )
-     }
-     const dbConnectionString = `postgres://${process.env.PGUSER}:${process.env.PGPASSWORD}@${process.env.PGHOST}:${process.env.PGPORT}/${dataset.database}`
-     let definitionFilePath = path.join(
-       definitionsDir,
-       `${dataset.database}.ts`,
-     )
-     execSync(
-       `npx schemats generate -c ${dbConnectionString} -s ${dataset.schema} -o ${definitionFilePath}`,
-       {
-         // cwd: dataDir,
-         env: process.env,
-         encoding: "utf-8",
-         // stdio: ["ignore", "ignore", "pipe"],
-       },
-     )
-     const definition = fs.readFileSync(definitionFilePath, { encoding: "utf8" })
-     const definitionRepaired = definition
-       .replace(/\r\n/g, "\n")
-       .replace(
-         /AUTO-GENERATED FILE @ \d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}/,
-         "AUTO-GENERATED FILE",
-       )
-     fs.writeFileSync(definitionFilePath, definitionRepaired)
-
-     definitionsDir = path.resolve("src", "raw_types_kysely")
-     definitionFilePath = path.join(
-       definitionsDir,
-       `${dataset.database}.ts`,
-     )
-     execSync(
-       `kysely-codegen --url ${dbConnectionString} --schema=${dataset.schema} --out-file=${definitionFilePath}`,
-       {
-         // cwd: dataDir,
-         env: process.env,
-         encoding: "utf-8",
-         // stdio: ["ignore", "ignore", "pipe"],
-       },
-     )
-   }
- }
-
- async function retrieveOpenData(): Promise<void> {
-   const dataDir = options.dataDir
-   assert(dataDir, "Missing argument: data directory")
-
-   process.env = {
-     ...process.env,
-     PGHOST: process.env.PGHOST || config.db.host,
-     PGPORT: process.env.PGPORT || config.db.port,
-     PGUSER: process.env.PGUSER || config.db.user,
-     PGPASSWORD: process.env.PGPASSWORD || config.db.password,
-   }
-   assert(process.env.PGHOST
-     && process.env.PGPORT
-     && process.env.PGUSER
-     && process.env.PGPASSWORD,
-     "Missing database configuration: environment variables PGHOST, PGPORT, PGUSER and PGPASSWORD or TRICOTEUSES_SENAT_DB_* in .env file"
-   )
-
-   const choosenDatasets: Dataset[] = getChosenFromEnabledDatasets(options.categories)
-   // await Promise.all(choosenDatasets.map(dataset => retrieveDataset(dataDir, dataset)))
-   for (const dataset of choosenDatasets) {
-     await retrieveDataset(dataDir, dataset)
-   }
- }
-
- retrieveOpenData()
-   .then(() => process.exit(0))
-   .catch((error) => {
-     console.log(error)
-     process.exit(1)
-   })
package/lib/scripts/retrieve_senateurs_photos.mjs
@@ -1,147 +0,0 @@
- import assert from "assert";
- import { execSync } from "child_process";
- import commandLineArgs from "command-line-args";
- import fs from "fs-extra";
- // import fetch from "node-fetch"
- import path from "path";
- // import stream from "stream"
- // import util from "util"
- import { findActif as findActifSenateurs } from "../model/sens";
- import { slugify } from "../strings";
- import { commonOptions } from "./shared/cli_helpers";
- const optionsDefinitions = [
-     ...commonOptions,
-     {
-         alias: "f",
-         help: "fetch sénateurs' pictures instead of retrieving them from files",
-         name: "fetch",
-         type: Boolean,
-     },
- ];
- const options = commandLineArgs(optionsDefinitions);
- // const pipeline = util.promisify(stream.pipeline)
- async function retrievePhotosSenateurs() {
-     const dataDir = options.dataDir;
-     assert(dataDir, "Missing argument: data directory");
-     const photosDir = path.join(dataDir, "photos_senateurs");
-     const missingPhotoFilePath = path.resolve(__dirname, "images", "transparent_155x225.jpg");
-     const sens = await Array.fromAsync(findActifSenateurs());
-     // Download photos.
-     fs.ensureDirSync(photosDir);
-     if (options.fetch) {
-         for (const sen of sens) {
-             const photoStem = `${slugify(sen.sennomuse, "_")}_${slugify(sen.senprenomuse, "_")}${slugify(sen.senmat, "_")}`;
-             const photoFilename = photoStem + ".jpg";
-             const photoFilePath = path.join(photosDir, photoFilename);
-             const photoTempFilename = photoStem + "_temp.jpg";
-             const photoTempFilePath = path.join(photosDir, photoTempFilename);
-             const urlPhoto = `https://www.senat.fr/senimg/${photoFilename}`;
-             if (!options.silent) {
-                 console.log(`Loading photo ${urlPhoto} for ${sen.senprenomuse} ${sen.sennomuse}…`);
-             }
-             // Fetch fails with OpenSSL error: dh key too small.
-             // (so does "curl").
-             // for (let retries = 0; retries < 3; retries++) {
-             // const response = await fetch(urlPhoto)
-             // if (response.ok) {
-             // await pipeline(response.body, fs.createWriteStream(photoTempFilePath))
-             // fs.renameSync(photoTempFilePath, photoFilePath)
-             // break
-             // }
-             // if (retries >= 2) {
-             // console.warn(`Fetch failed: ${urlPhoto} (${sen.senprenomuse} ${sen.sennomuse})`)
-             // console.warn(response.status, response.statusText)
-             // console.warn(await response.text())
-             // if (fs.existsSync(photoFilePath)) {
-             // console.warn(" => Reusing existing image")
-             // } else {
-             // console.warn(" => Using blank image")
-             // fs.copyFileSync(missingPhotoFilePath, photoFilePath)
-             // }
-             // break
-             // }
-             // }
-             try {
-                 execSync(`wget --quiet -O ${photoTempFilename} ${urlPhoto}`, {
-                     cwd: photosDir,
-                     env: process.env,
-                     encoding: "utf-8",
-                     // stdio: ["ignore", "ignore", "pipe"],
-                 });
-                 fs.renameSync(photoTempFilePath, photoFilePath);
-             }
-             catch (error) {
-                 if (typeof error === "object" && error && "status" in error && error.status === 8) {
-                     console.error(`Unable to load photo for ${sen.senprenomuse} ${sen.sennomuse}`);
-                     continue;
-                 }
-                 throw error;
-             }
-         }
-     }
-     // Resize photos to 155x225, because some haven't exactly this size.
-     for (const sen of sens) {
-         const photoStem = `${slugify(sen.sennomuse, "_")}_${slugify(sen.senprenomuse, "_")}${slugify(sen.senmat, "_")}`;
-         const photoFilename = photoStem + ".jpg";
-         const photoFilePath = path.join(photosDir, photoFilename);
-         if (fs.existsSync(photoFilePath)) {
-             if (!options.silent) {
-                 console.log(`Resizing photo ${photoStem} for ${sen.senprenomuse} ${sen.sennomuse}…`);
-             }
-             execSync(`gm convert -resize 155x225! ${photoStem}.jpg ${photoStem}_155x225.jpg`, {
-                 cwd: photosDir,
-             });
-         }
-         else {
-             if (!options.silent) {
-                 console.warn(`Missing photo for ${sen.senprenomuse} ${sen.sennomuse}: using blank image`);
-             }
-             fs.copyFileSync(missingPhotoFilePath, path.join(photosDir, `${photoStem}_155x225.jpg`));
-         }
-     }
-     // Create a mosaic of photos.
-     if (!options.silent) {
-         console.log("Creating mosaic of photos…");
-     }
-     const photoBySenmat = {};
-     const rowsFilenames = [];
-     for (let senIndex = 0, rowIndex = 0; senIndex < sens.length; senIndex += 25, rowIndex++) {
-         const row = sens.slice(senIndex, senIndex + 25);
-         const photosFilenames = [];
-         for (const [columnIndex, sen] of row.entries()) {
-             const photoStem = `${slugify(sen.sennomuse, "_")}_${slugify(sen.senprenomuse, "_")}${slugify(sen.senmat, "_")}`;
-             const photoFilename = `${photoStem}_155x225.jpg`;
-             photosFilenames.push(photoFilename);
-             photoBySenmat[sen.senmat] = {
-                 chemin: `photos_senateurs/${photoFilename}`,
-                 cheminMosaique: "photos_senateurs/senateurs.jpg",
-                 hauteur: 225,
-                 largeur: 155,
-                 xMosaique: columnIndex * 155,
-                 yMosaique: rowIndex * 225,
-             };
-         }
-         const rowFilename = `row-${rowIndex}.jpg`;
-         execSync(`gm convert ${photosFilenames.join(" ")} +append ${rowFilename}`, {
-             cwd: photosDir,
-         });
-         rowsFilenames.push(rowFilename);
-     }
-     execSync(`gm convert ${rowsFilenames.join(" ")} -append senateurs.jpg`, {
-         cwd: photosDir,
-     });
-     for (const rowFilename of rowsFilenames) {
-         fs.unlinkSync(path.join(photosDir, rowFilename));
-     }
-     if (!options.silent) {
-         console.log("Creating JSON file containing informations on all pictures…");
-     }
-     const jsonFilePath = path.join(photosDir, "senateurs.json");
-     fs.writeFileSync(jsonFilePath, JSON.stringify(photoBySenmat, null, 2));
- }
- retrievePhotosSenateurs()
-     .then(() => process.exit(0))
-     .catch((error) => {
-         console.log(error);
-         process.exit(1);
-     });
package/lib/scripts/retrieve_senateurs_photos.ts
@@ -1,177 +0,0 @@
- import assert from "assert"
- import { execSync } from "child_process"
- import commandLineArgs from "command-line-args"
- import fs from "fs-extra"
- // import fetch from "node-fetch"
- import path from "path"
- // import stream from "stream"
- // import util from "util"
-
- import { findActif as findActifSenateurs } from "../model/sens"
- import { slugify } from "../strings"
- import { Photo } from "../types/sens"
- import { commonOptions } from "./shared/cli_helpers"
-
- const optionsDefinitions = [
-   ...commonOptions,
-   {
-     alias: "f",
-     help: "fetch sénateurs' pictures instead of retrieving them from files",
-     name: "fetch",
-     type: Boolean,
-   },
- ]
- const options = commandLineArgs(optionsDefinitions)
- // const pipeline = util.promisify(stream.pipeline)
-
- async function retrievePhotosSenateurs(): Promise<void> {
-   const dataDir = options.dataDir
-   assert(dataDir, "Missing argument: data directory")
-
-   const photosDir = path.join(dataDir, "photos_senateurs")
-   const missingPhotoFilePath = path.resolve(__dirname, "images", "transparent_155x225.jpg")
-
-   const sens = await Array.fromAsync(findActifSenateurs())
-
-   // Download photos.
-   fs.ensureDirSync(photosDir)
-   if (options.fetch) {
-     for (const sen of sens) {
-       const photoStem = `${slugify(sen.sennomuse, "_")}_${slugify(
-         sen.senprenomuse,
-         "_",
-       )}${slugify(sen.senmat, "_")}`
-       const photoFilename = photoStem + ".jpg"
-       const photoFilePath = path.join(photosDir, photoFilename)
-       const photoTempFilename = photoStem + "_temp.jpg"
-       const photoTempFilePath = path.join(photosDir, photoTempFilename)
-       const urlPhoto = `https://www.senat.fr/senimg/${photoFilename}`
-       if (!options.silent) {
-         console.log(
-           `Loading photo ${urlPhoto} for ${sen.senprenomuse} ${sen.sennomuse}…`,
-         )
-       }
-       // Fetch fails with OpenSSL error: dh key too small.
-       // (so does "curl").
-       // for (let retries = 0; retries < 3; retries++) {
-       // const response = await fetch(urlPhoto)
-       // if (response.ok) {
-       // await pipeline(response.body, fs.createWriteStream(photoTempFilePath))
-       // fs.renameSync(photoTempFilePath, photoFilePath)
-       // break
-       // }
-       // if (retries >= 2) {
-       // console.warn(`Fetch failed: ${urlPhoto} (${sen.senprenomuse} ${sen.sennomuse})`)
-       // console.warn(response.status, response.statusText)
-       // console.warn(await response.text())
-       // if (fs.existsSync(photoFilePath)) {
-       // console.warn(" => Reusing existing image")
-       // } else {
-       // console.warn(" => Using blank image")
-       // fs.copyFileSync(missingPhotoFilePath, photoFilePath)
-       // }
-       // break
-       // }
-       // }
-       try {
-         execSync(`wget --quiet -O ${photoTempFilename} ${urlPhoto}`, {
-           cwd: photosDir,
-           env: process.env,
-           encoding: "utf-8",
-           // stdio: ["ignore", "ignore", "pipe"],
-         })
-         fs.renameSync(photoTempFilePath, photoFilePath)
-       } catch (error) {
-         if (typeof error === "object" && error && "status" in error && error.status === 8) {
-           console.error(`Unable to load photo for ${sen.senprenomuse} ${sen.sennomuse}`)
-           continue
-         }
-         throw error
-       }
-     }
-   }
-
-   // Resize photos to 155x225, because some haven't exactly this size.
-   for (const sen of sens) {
-     const photoStem = `${slugify(sen.sennomuse, "_")}_${slugify(
-       sen.senprenomuse,
-       "_",
-     )}${slugify(sen.senmat, "_")}`
-     const photoFilename = photoStem + ".jpg"
-     const photoFilePath = path.join(photosDir, photoFilename)
-     if (fs.existsSync(photoFilePath)) {
-       if (!options.silent) {
-         console.log(
-           `Resizing photo ${photoStem} for ${sen.senprenomuse} ${sen.sennomuse}…`,
-         )
-       }
-
-       execSync(
-         `gm convert -resize 155x225! ${photoStem}.jpg ${photoStem}_155x225.jpg`,
-         {
-           cwd: photosDir,
-         },
-       )
-     } else {
-       if (!options.silent) {
-         console.warn(`Missing photo for ${sen.senprenomuse} ${sen.sennomuse}: using blank image`)
-       }
-       fs.copyFileSync(missingPhotoFilePath, path.join(photosDir, `${photoStem}_155x225.jpg`))
-     }
-   }
-
-   // Create a mosaic of photos.
-   if (!options.silent) {
-     console.log("Creating mosaic of photos…")
-   }
-   const photoBySenmat: { [senmat: string]: Photo } = {}
-   const rowsFilenames: string[] = []
-   for (
-     let senIndex = 0, rowIndex = 0;
-     senIndex < sens.length;
-     senIndex += 25, rowIndex++
-   ) {
-     const row = sens.slice(senIndex, senIndex + 25)
-     const photosFilenames: string[] = []
-     for (const [columnIndex, sen] of row.entries()) {
-       const photoStem = `${slugify(sen.sennomuse, "_")}_${slugify(
-         sen.senprenomuse,
-         "_",
-       )}${slugify(sen.senmat, "_")}`
-       const photoFilename = `${photoStem}_155x225.jpg`
-       photosFilenames.push(photoFilename)
-       photoBySenmat[sen.senmat] = {
-         chemin: `photos_senateurs/${photoFilename}`,
-         cheminMosaique: "photos_senateurs/senateurs.jpg",
-         hauteur: 225,
-         largeur: 155,
-         xMosaique: columnIndex * 155,
-         yMosaique: rowIndex * 225,
-       }
-     }
-     const rowFilename = `row-${rowIndex}.jpg`
-     execSync(`gm convert ${photosFilenames.join(" ")} +append ${rowFilename}`, {
-       cwd: photosDir,
-     })
-     rowsFilenames.push(rowFilename)
-   }
-   execSync(`gm convert ${rowsFilenames.join(" ")} -append senateurs.jpg`, {
-     cwd: photosDir,
-   })
-   for (const rowFilename of rowsFilenames) {
-     fs.unlinkSync(path.join(photosDir, rowFilename))
-   }
-
-   if (!options.silent) {
-     console.log("Creating JSON file containing informations on all pictures…")
-   }
-   const jsonFilePath = path.join(photosDir, "senateurs.json")
-   fs.writeFileSync(jsonFilePath, JSON.stringify(photoBySenmat, null, 2))
- }
-
- retrievePhotosSenateurs()
-   .then(() => process.exit(0))
-   .catch((error) => {
-     console.log(error)
-     process.exit(1)
-   })
package/lib/scripts/retrieve_textes.d.ts
@@ -1 +0,0 @@
- export {};