@tmlmobilidade/generate-offer-files 20250625.1536.33

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,60 @@
1
+ #!/usr/bin/env node
2
+ /* * */
3
+ import { generateOfferOutput } from './main.js';
4
+ import LOGGER from '@helperkits/logger';
5
+ import { ASCII_CM_SHORT } from '@tmlmobilidade/lib';
6
+ import { validateOperationalDate } from '@tmlmobilidade/types';
7
+ import { Command } from 'commander';
8
+ import fs from 'fs';
9
+ /* * */
10
(async function init() {
    //
    // Configure the command-line interface and parse the provided arguments.
    const program = new Command();
    program
        .name('Generate Offer Journeys')
        .description('Output offer_journey.json files from GTFS files.')
        .requiredOption('--file <path>', 'GTFS file path')
        .requiredOption('--start-date <operational-date>', 'The start date of the plan in YYYYMMDD format')
        .requiredOption('--end-date <operational-date>', 'The end date of the plan in YYYYMMDD format')
        .option('--output-dir <path>', 'Output directory for the offer_journey.json files', './output')
        .option('--override', 'Override output directory if it exists', false)
        .option('--feed-id <value>', 'Optional feedId value to include in output files', null)
        .parse();
    const options = program.opts();
    //
    // Both dates must be valid OperationalDate values; abort the run otherwise.
    try {
        options.startDate = validateOperationalDate(options.startDate);
        options.endDate = validateOperationalDate(options.endDate);
    }
    catch (error) {
        LOGGER.divider();
        LOGGER.error(`--start-date and/or --end-date are not valid:`, error.message);
        LOGGER.divider();
        return;
    }
    //
    // Prepare an empty output directory. An existing directory is only
    // removed when --override was passed; otherwise the run is aborted.
    if (fs.existsSync(options.outputDir)) {
        if (!options.override) {
            LOGGER.divider();
            LOGGER.error(`Output directory "${options.outputDir}" already exists. Please remove it or change it before running the script.`);
            LOGGER.divider();
            return;
        }
        LOGGER.info(`Output directory "${options.outputDir}" already exists. It will be overridden.`);
        fs.rmSync(options.outputDir, { recursive: true });
    }
    fs.mkdirSync(options.outputDir, { recursive: true });
    //
    // Print the banner.
    LOGGER.spacer(3);
    console.log(ASCII_CM_SHORT);
    LOGGER.spacer(3);
    //
    // Run the offer generation process.
    await generateOfferOutput(options.file, options.startDate, options.endDate, options.outputDir, options.feedId);
    //
})();
package/dist/main.js ADDED
@@ -0,0 +1,638 @@
1
+ /* * */
2
+ import LOGGER from '@helperkits/logger';
3
+ import TIMETRACKER from '@helperkits/timer';
4
+ import { JsonWriter } from '@helperkits/writer';
5
+ import { validateOperationalDate } from '@tmlmobilidade/types';
6
+ import { Dates } from '@tmlmobilidade/utils';
7
+ import { parse as csvParser } from 'csv-parse';
8
+ import extract from 'extract-zip';
9
+ import fs from 'fs';
10
+ import { ExceptionType } from 'gtfs-types';
11
+ /* * */
12
+ export async function generateOfferOutput(filePath, startDate, endDate, outputDir, feedId) {
13
+ try {
14
+ //
15
+ LOGGER.init();
16
+ const globalTimer = new TIMETRACKER();
17
+ //
18
+ // Setup the JSON batch writers to speed up the writing process and
19
+ // reduce the number of filesystem operations. These writers keep data
20
+ // in memory and write it to disk once the batch limit is reached.
21
+ const offerStopsWriter = new JsonWriter('offer-stops', `${outputDir}/offer-stops.json`, { add_after: '}', add_before: '{"payload":', batch_size: 100000 });
22
+ const offerJourneysWriter = new JsonWriter('offer-journeys', `${outputDir}/offer-journeys.json`, { add_after: '}', add_before: '{"payload":', batch_size: 100000 });
23
+ //
24
+ // Setup the required variables to keep track of the entities
25
+ // that should be saved as well as the original data that will
26
+ // be used to create the OfferJourney and OfferStop objects.
27
+ const referencedRouteIds = new Set();
28
+ const savedCalendarDates = new Map();
29
+ const savedTrips = new Map();
30
+ const savedStops = new Map();
31
+ const savedRoutes = new Map();
32
+ const savedStopTimes = new Map();
33
+ let totalOfferJourneysCounter = 0;
34
+ let totalOfferStopsCounter = 0;
35
+ //
36
+ // Prepare the working directories to work with the zip file
37
+ // and the extracted files. Try to unzip the archive.
38
+ const workdirPath = `/tmp/legacy-offer`;
39
+ const extractDirPath = `${workdirPath}/extracted`;
40
+ try {
41
+ fs.rmSync(workdirPath, { recursive: true });
42
+ fs.mkdirSync(workdirPath, { recursive: true });
43
+ LOGGER.success('Prepared working directory.');
44
+ LOGGER.spacer(1);
45
+ }
46
+ catch (error) {
47
+ LOGGER.error(`Error preparing workdir path "${workdirPath}".`, error);
48
+ process.exit(1);
49
+ }
50
+ try {
51
+ await unzipFile(filePath, extractDirPath);
52
+ LOGGER.success(`Unzipped GTFS file from "${filePath}" to "${extractDirPath}".`);
53
+ LOGGER.spacer(1);
54
+ }
55
+ catch (error) {
56
+ LOGGER.error('Error unzipping the file.', error);
57
+ process.exit(1);
58
+ }
59
+ //
60
+ // The order of execution matters when parsing each .txt file.
61
+ // This is because GTFS have a temporal validity. By first parsing calendar.txt
62
+ // and then calendar_dates.txt, we know exactly which service_ids "were active"
63
+ // between the start and end dates. Then, when parsing trips.txt, only trips
64
+ // that belong to those service_ids will be included. And so on, for each file.
65
+ // By having a list of trips we can extract only the necessary info from the other files,
66
+ // and thus significantly reducing the amount of information to be checked.
67
+ // --------------------------------------------------------------------------------------
68
+ /* * */
69
+ /* CALENDAR.TXT */
70
+ //
71
+ // Extract calendar.txt and filter only service_ids
72
+ // that are valid between the given start_date and end_date.
73
+ try {
74
+ //
75
+ LOGGER.info(`Reading zip entry "calendar.txt"...`);
76
+ const parseEachRow = async (data) => {
77
+ //
78
+ //
79
+ // Validate the start and end dates to ensure
80
+ // they are of in the OperationalDate format
81
+ let serviceIdStartDate;
82
+ let serviceIdEndDate;
83
+ try {
84
+ serviceIdStartDate = validateOperationalDate(data.start_date);
85
+ serviceIdEndDate = validateOperationalDate(data.end_date);
86
+ }
87
+ catch (error) {
88
+ LOGGER.error(`Error creating operational date "${data.start_date}" or "${data.end_date}" for service_id "${data.service_id}"`, error);
89
+ return;
90
+ }
91
+ //
92
+ // Check if this service_id is between the given start_date and end_date.
93
+ // Clip the service_id's start and end dates to the given start and end dates.
94
+ if (serviceIdEndDate < startDate || serviceIdStartDate > endDate)
95
+ return;
96
+ if (serviceIdStartDate < startDate)
97
+ serviceIdStartDate = startDate;
98
+ if (serviceIdEndDate > endDate)
99
+ serviceIdEndDate = endDate;
100
+ //
101
+ // If we're here, it means the service_id is valid between the given dates.
102
+ // For the configured weekly schedule, create the individual operational dates
103
+ // for each day of the week that is active.
104
+ const allOperationalDatesInRange = getIndividualDatesFromRange(serviceIdStartDate, serviceIdEndDate);
105
+ const validOperationalDates = [];
106
+ for (const currentDate of allOperationalDatesInRange) {
107
+ const dayOfWeek = Dates.fromOperationalDate(currentDate, 'Europe/Lisbon').toFormat('c');
108
+ if (dayOfWeek === '1' && String(data.monday) === '1')
109
+ validOperationalDates.push(currentDate);
110
+ if (dayOfWeek === '2' && String(data.tuesday) === '1')
111
+ validOperationalDates.push(currentDate);
112
+ if (dayOfWeek === '3' && String(data.wednesday) === '1')
113
+ validOperationalDates.push(currentDate);
114
+ if (dayOfWeek === '4' && String(data.thursday) === '1')
115
+ validOperationalDates.push(currentDate);
116
+ if (dayOfWeek === '5' && String(data.friday) === '1')
117
+ validOperationalDates.push(currentDate);
118
+ if (dayOfWeek === '6' && String(data.saturday) === '1')
119
+ validOperationalDates.push(currentDate);
120
+ if (dayOfWeek === '7' && String(data.sunday) === '1')
121
+ validOperationalDates.push(currentDate);
122
+ }
123
+ //
124
+ // Save the valid operational dates for this service_id
125
+ savedCalendarDates.set(data.service_id, validOperationalDates);
126
+ //
127
+ };
128
+ //
129
+ // Setup the CSV parsing operation only if the file exists
130
+ if (fs.existsSync(`${extractDirPath}/calendar.txt`)) {
131
+ await parseCsvFile(`${extractDirPath}/calendar.txt`, parseEachRow);
132
+ LOGGER.success(`Finished processing "calendar.txt"`);
133
+ LOGGER.spacer(1);
134
+ }
135
+ else {
136
+ LOGGER.info(`Optional file "calendar.txt" not found. This may or may not be an error. Proceeding...`);
137
+ LOGGER.spacer(1);
138
+ }
139
+ //
140
+ }
141
+ catch (error) {
142
+ LOGGER.error('Error processing "calendar.txt" file.', error);
143
+ throw new Error('✖︎ Error processing "calendar.txt" file.');
144
+ }
145
+ /* * */
146
+ /* CALENDAR_DATES.TXT */
147
+ //
148
+ // Extract calendar_dates.txt and either update the previously saved service_ids,
149
+ // based on the configured exception_type, or create new service_ids that were not
150
+ // present in calendar.txt and are between the given start and end dates.
151
+ try {
152
+ //
153
+ LOGGER.info(`Reading zip entry "calendar_dates.txt"...`);
154
+ const parseEachRow = async (data) => {
155
+ //
156
+ //
157
+ // Validate the date to ensure it is of type OperationalDate
158
+ let currentOperationalDate;
159
+ try {
160
+ currentOperationalDate = validateOperationalDate(data.date);
161
+ }
162
+ catch (error) {
163
+ LOGGER.error(`Error creating operational date "${data.date}" for service_id "${data.service_id}"`, error);
164
+ return;
165
+ }
166
+ //
167
+ // Skip if this row's date is not between the given start and end dates
168
+ if (currentOperationalDate < startDate || currentOperationalDate > endDate)
169
+ return;
170
+ //
171
+ // If we're here, it means the service_id is valid between the given dates.
172
+ // Get the previously saved calendars and check if it exists for this service_id.
173
+ const savedCalendar = savedCalendarDates.get(data.service_id);
174
+ if (savedCalendar) {
175
+ // Create a new Set to avoid duplicated dates
176
+ const updatedCalendar = new Set(savedCalendar);
177
+ // If this service_id was previously saved, either add or remove the current date
178
+ // to it based on the exception_type value for this row.
179
+ if (Number(data.exception_type) === ExceptionType.SERVICE_ADDED)
180
+ updatedCalendar.add(currentOperationalDate);
181
+ else if (Number(data.exception_type) === ExceptionType.SERVICE_REMOVED)
182
+ updatedCalendar.delete(currentOperationalDate);
183
+ // Update the service_id with the new dates
184
+ savedCalendarDates.set(data.service_id, Array.from(updatedCalendar));
185
+ }
186
+ else {
187
+ // If this is the first time we're seeing this service_id, then it is only necessary
188
+ // to initiate a new dates array if it is a service addition
189
+ if (Number(data.exception_type) === ExceptionType.SERVICE_ADDED) {
190
+ savedCalendarDates.set(data.service_id, [currentOperationalDate]);
191
+ }
192
+ }
193
+ //
194
+ };
195
+ //
196
+ // Setup the CSV parsing operation only if the file exists
197
+ if (fs.existsSync(`${extractDirPath}/calendar_dates.txt`)) {
198
+ await parseCsvFile(`${extractDirPath}/calendar_dates.txt`, parseEachRow);
199
+ LOGGER.success(`Finished processing "calendar_dates.txt"`);
200
+ LOGGER.spacer(1);
201
+ }
202
+ else {
203
+ LOGGER.info(`Optional file "calendar_dates.txt" not found. This may or may not be an error. Proceeding...`);
204
+ LOGGER.spacer(1);
205
+ }
206
+ //
207
+ }
208
+ catch (error) {
209
+ LOGGER.error('Error processing "calendar_dates.txt" file.', error);
210
+ throw new Error('✖︎ Error processing "calendar_dates.txt" file.');
211
+ }
212
+ /* * */
213
+ /* TRIPS.TXT */
214
+ //
215
+ // Next up: trips.txt
216
+ // Now that the calendars are sorted out, the jobs is easier for the trips.
217
+ // Only include trips which have the referenced service IDs saved before.
218
+ try {
219
+ //
220
+ LOGGER.info(`Reading zip entry "trips.txt"...`);
221
+ const parseEachRow = async (data) => {
222
+ //
223
+ //
224
+ // For each trip, check if the associated service_id was saved
225
+ // in the previous step or not. Include it if yes, skip otherwise.
226
+ if (!savedCalendarDates.has(data.service_id))
227
+ return;
228
+ //
229
+ // Format the exported row. Only include the minimum required data
230
+ // to prevent memory bloat later on.
231
+ const parsedRowData = {
232
+ direction_id: data.direction_id,
233
+ pattern_id: data.pattern_id,
234
+ route_id: data.route_id,
235
+ service_id: data.service_id,
236
+ shape_id: data.shape_id,
237
+ trip_headsign: data.trip_headsign,
238
+ trip_id: data.trip_id,
239
+ wheelchair_accessible: data.wheelchair_accessible,
240
+ };
241
+ //
242
+ // Save this trip for later and reference
243
+ // the associated route_id to filter them later.
244
+ savedTrips.set(data.trip_id, parsedRowData);
245
+ referencedRouteIds.add(data.route_id);
246
+ //
247
+ };
248
+ //
249
+ // Setup the CSV parsing operation
250
+ await parseCsvFile(`${extractDirPath}/trips.txt`, parseEachRow);
251
+ LOGGER.success(`Finished processing "trips.txt"`);
252
+ LOGGER.spacer(1);
253
+ //
254
+ }
255
+ catch (error) {
256
+ LOGGER.error('Error processing "trips.txt" file.', error);
257
+ throw new Error('✖︎ Error processing "trips.txt" file.');
258
+ }
259
+ /* * */
260
+ /* ROUTES.TXT */
261
+ //
262
+ // Next up: routes.txt
263
+ // For routes, only include the ones referenced in the filtered trips.
264
+ try {
265
+ //
266
+ LOGGER.info(`Reading zip entry "routes.txt"...`);
267
+ const parseEachRow = async (data) => {
268
+ //
269
+ //
270
+ // For each route, only save the ones referenced
271
+ // by the previously saved trips.
272
+ if (!referencedRouteIds.has(data.route_id))
273
+ return;
274
+ //
275
+ // Format and save the exported row
276
+ const parsedRowData = {
277
+ agency_id: data.agency_id,
278
+ line_id: data.line_id,
279
+ line_long_name: data.line_long_name,
280
+ line_short_name: data.line_short_name,
281
+ path_type: data.path_type,
282
+ route_color: data.route_color,
283
+ route_id: data.route_id,
284
+ route_long_name: data.route_long_name,
285
+ route_short_name: data.route_short_name,
286
+ route_text_color: data.route_text_color,
287
+ };
288
+ savedRoutes.set(data.route_id, parsedRowData);
289
+ //
290
+ };
291
+ //
292
+ // Setup the CSV parsing operation
293
+ await parseCsvFile(`${extractDirPath}/routes.txt`, parseEachRow);
294
+ LOGGER.success(`Finished processing "routes.txt"`);
295
+ LOGGER.spacer(1);
296
+ //
297
+ }
298
+ catch (error) {
299
+ LOGGER.error('Error processing "routes.txt" file.', error);
300
+ throw new Error('✖︎ Error processing "routes.txt" file.');
301
+ }
302
+ /* * */
303
+ /* STOPS.TXT */
304
+ //
305
+ // Next up: stops.txt
306
+ // For stops, include all of them since we don't have a way to filter them yet like trips/routes/shapes.
307
+ // By saving all of them, we also speed up the processing of each stop_time by including the stop data right away.
308
+ try {
309
+ //
310
+ LOGGER.info(`Reading zip entry "stops.txt"...`);
311
+ const parseEachRow = async (data) => {
312
+ //
313
+ //
314
+ // Save all stops, but only the mininum required data.
315
+ const parsedRowData = {
316
+ location_type: data.location_type,
317
+ municipality_id: data.municipality_id,
318
+ parent_station: data.parent_station,
319
+ parish_id: data.parish_id,
320
+ region_id: data.region_id,
321
+ stop_id: data.stop_id,
322
+ stop_lat: Number(data.stop_lat),
323
+ stop_lon: Number(data.stop_lon),
324
+ stop_name: data.stop_name,
325
+ };
326
+ savedStops.set(data.stop_id, parsedRowData);
327
+ //
328
+ };
329
+ //
330
+ // Setup the CSV parsing operation
331
+ await parseCsvFile(`${extractDirPath}/stops.txt`, parseEachRow);
332
+ LOGGER.success(`Finished processing "stops.txt"`);
333
+ LOGGER.spacer(1);
334
+ //
335
+ }
336
+ catch (error) {
337
+ LOGGER.error('Error processing "stops.txt" file.', error);
338
+ throw new Error('✖︎ Error processing "stops.txt" file.');
339
+ }
340
+ /* * */
341
+ /* STOP_TIMES.TXT */
342
+ //
343
+ // Next up: stop_times.txt
344
+ // Do a similiar check as the previous steps. Only include the stop_times for trips referenced before.
345
+ // Since this is the most resource intensive operation of them all, include the associated stop data
346
+ // right away to avoid another lookup later.
347
+ try {
348
+ //
349
+ LOGGER.info(`Reading zip entry "stop_times.txt"...`);
350
+ const parseEachRow = async (data) => {
351
+ //
352
+ //
353
+ // For each stopTime of each trip, check if the associated trip_id was saved
354
+ // in the previous step or not. Skip if this row's trip_id was not saved before.
355
+ // Also, check if the stop_id is valid and was saved before.
356
+ const tripData = savedTrips.get(data.trip_id);
357
+ if (!tripData)
358
+ return;
359
+ const stopData = savedStops.get(data.stop_id);
360
+ if (!stopData)
361
+ return;
362
+ //
363
+ // Format the exported row. Only include the minimum required data
364
+ // to prevent memory bloat later on, and include the stop data right away.
365
+ const parsedRowData = {
366
+ arrival_time: data.arrival_time,
367
+ continuous_drop_off: data.continuous_drop_off,
368
+ continuous_pickup: data.continuous_pickup,
369
+ departure_time: data.departure_time,
370
+ shape_dist_traveled: data.shape_dist_traveled,
371
+ stop_headsign: data.stop_headsign,
372
+ stop_id: data.stop_id,
373
+ stop_sequence: data.stop_sequence,
374
+ timepoint: data.timepoint,
375
+ trip_id: data.trip_id,
376
+ };
377
+ const savedStopTime = savedStopTimes.get(data.trip_id);
378
+ if (savedStopTime)
379
+ savedStopTimes.set(data.trip_id, [...savedStopTime, parsedRowData]);
380
+ else
381
+ savedStopTimes.set(data.trip_id, [parsedRowData]);
382
+ //
383
+ };
384
+ //
385
+ // Setup the CSV parsing operation
386
+ await parseCsvFile(`${extractDirPath}/stop_times.txt`, parseEachRow);
387
+ LOGGER.success(`Finished processing "stop_times.txt"`);
388
+ LOGGER.spacer(1);
389
+ //
390
+ }
391
+ catch (error) {
392
+ LOGGER.error('Error processing "stop_times.txt" file.', error);
393
+ throw new Error('✖︎ Error processing "stop_times.txt" file.');
394
+ }
395
+ /* * */
396
+ /* FINAL OUTPUT FILES */
397
+ //
398
+ // Actually construct the OfferJourney and OfferStop objects
399
+ // and write them to the output files. This is the final step of the process.
400
+ try {
401
+ //
402
+ for (const currentTrip of savedTrips.values()) {
403
+ //
404
+ //
405
+ // Get associated data for the current trip
406
+ const calendarDatesData = savedCalendarDates.get(currentTrip.service_id);
407
+ const stopTimesData = savedStopTimes.get(currentTrip.trip_id);
408
+ const routeData = savedRoutes.get(currentTrip.route_id);
409
+ if (!stopTimesData || stopTimesData.length === 0) {
410
+ LOGGER.error(`Trip ${currentTrip.trip_id} has no path data. Skipping...`);
411
+ continue;
412
+ }
413
+ //
414
+ // Build an OfferJourney object for all dates when the trip is valid
415
+ for (const currentCalendarDate of calendarDatesData) {
416
+ //
417
+ //
418
+ // Ensure the extension data is valid and convert it to meters, if necessary.
419
+ // Extract common use cases from the stopTimesData to avoid repeated calculations.
420
+ const firstStopTime = stopTimesData[0];
421
+ const firstStopData = savedStops.get(firstStopTime.stop_id);
422
+ const lastStopTime = stopTimesData[stopTimesData.length - 1];
423
+ const lastStopData = savedStops.get(lastStopTime.stop_id);
424
+ const extensionScheduledInMeters = convertMetersOrKilometersToMeters(lastStopTime.shape_dist_traveled, lastStopTime.shape_dist_traveled);
425
+ const currentDateFormated = Dates.fromOperationalDate(currentCalendarDate, 'Europe/Lisbon').toFormat('yyyy-MM-dd');
426
+ //
427
+ const offerJourneyData = {
428
+ agencyId: routeData.agency_id ?? '-',
429
+ arrivalTime: lastStopTime.arrival_time ?? '-',
430
+ bikesAllowed: null,
431
+ blockId: null,
432
+ circular: null,
433
+ continuousDropOff: null,
434
+ continuousPickup: null,
435
+ date: currentDateFormated,
436
+ dayType: null,
437
+ dayTypeName: null,
438
+ departureTime: firstStopTime.departure_time ?? '-',
439
+ directionId: currentTrip.direction_id ?? null,
440
+ endShiftId: null,
441
+ endStopCode: lastStopTime.stop_id ?? '-',
442
+ endStopId: lastStopTime.stop_id ?? '-',
443
+ endStopName: lastStopData.stop_name ?? '-',
444
+ feedId: feedId,
445
+ holiday: null,
446
+ holidayName: null,
447
+ lineId: String(routeData.line_id ?? '-'),
448
+ lineLongName: routeData.line_long_name ?? '-',
449
+ lineShortName: routeData.line_short_name ?? '-',
450
+ pathType: routeData.path_type ?? 0,
451
+ patternId: currentTrip.pattern_id ?? '-',
452
+ patternShortName: null,
453
+ period: null,
454
+ periodName: null,
455
+ routeColor: null,
456
+ routeDesc: null,
457
+ routeDestination: null,
458
+ routeId: currentTrip.route_id ?? '-',
459
+ routeLongName: routeData.route_long_name ?? '-',
460
+ routeOrigin: null,
461
+ routeShortName: routeData.route_short_name ?? '-',
462
+ routeTextColor: routeData.route_text_color ?? '-',
463
+ routeType: String(routeData.route_type ?? '-'),
464
+ rowId: null,
465
+ school: null,
466
+ shapeId: currentTrip.shape_id ?? '-',
467
+ startShiftId: null,
468
+ startStopCode: firstStopTime.stop_id ?? '-',
469
+ startStopId: firstStopTime.stop_id ?? '-',
470
+ startStopName: firstStopData.stop_name ?? '-',
471
+ tripHeadsign: currentTrip.trip_headsign ?? '-',
472
+ tripId: currentTrip.trip_id ?? '-',
473
+ tripLength: extensionScheduledInMeters ?? 0,
474
+ wheelchairAccessible: currentTrip.wheelchair_accessible ?? 0,
475
+ };
476
+ offerJourneysWriter.write(offerJourneyData);
477
+ totalOfferJourneysCounter++;
478
+ //
479
+ // Now, for each stop time of the current trip, create an OfferStop object
480
+ for (const currentStopTime of stopTimesData) {
481
+ //
482
+ const currentStop = savedStops.get(currentStopTime.stop_id);
483
+ const shapeDistTraveledInMeters = convertMetersOrKilometersToMeters(currentStopTime.shape_dist_traveled, lastStopTime.shape_dist_traveled);
484
+ const offerStopData = {
485
+ arrivalTime: currentStopTime.arrival_time,
486
+ bench: null,
487
+ continuousDropOff: currentStopTime.continuous_drop_off,
488
+ continuousPickup: currentStopTime.continuous_pickup,
489
+ date: currentDateFormated,
490
+ departureTime: currentStopTime.departure_time,
491
+ dropOffType: currentStopTime.drop_off_type,
492
+ entranceRestriction: null,
493
+ equipment: null,
494
+ exitRestriction: null,
495
+ feedId: feedId,
496
+ locationType: currentStop.location_type ?? 0,
497
+ municipality: Number(currentStop.municipality_id ?? 0),
498
+ municipalityFare1: null,
499
+ municipalityFare2: null,
500
+ networkMap: null,
501
+ parentStation: currentStop.parent_station ?? '',
502
+ pickupType: null,
503
+ platformCode: currentStop.platform_code,
504
+ preservationState: null,
505
+ realTimeInformation: null,
506
+ region: currentStop.region_id ?? '-',
507
+ rowId: null,
508
+ schedule: null,
509
+ shapeDistTraveled: shapeDistTraveledInMeters,
510
+ shelter: null,
511
+ signalling: null,
512
+ slot: null,
513
+ stopCode: currentStopTime.stop_id,
514
+ stopDesc: null,
515
+ stopHeadsign: currentStopTime.stop_headsign,
516
+ stopId: currentStopTime.stop_id,
517
+ stopIdStepp: null,
518
+ stopLat: currentStop.stop_lat,
519
+ stopLon: currentStop.stop_lon,
520
+ stopName: currentStop.stop_name,
521
+ stopRemarks: null,
522
+ stopSequence: currentStopTime.stop_sequence,
523
+ tariff: null,
524
+ timepoint: Number(currentStopTime.timepoint ?? 0),
525
+ tripId: currentTrip.trip_id,
526
+ wheelchairBoarding: Number(currentStop.wheelchair_boarding),
527
+ zoneShift: null,
528
+ };
529
+ offerStopsWriter.write(offerStopData);
530
+ totalOfferStopsCounter++;
531
+ //
532
+ }
533
+ }
534
+ //
535
+ // Delete the already written data to free up memory sooner
536
+ savedTrips.delete(currentTrip.trip_id);
537
+ savedStopTimes.delete(currentTrip.trip_id);
538
+ //
539
+ }
540
+ //
541
+ }
542
+ catch (error) {
543
+ LOGGER.error('Error transforming or saving Offer documents.', error);
544
+ throw new Error('✖︎ Error transforming or saving Offer documents.');
545
+ }
546
+ //
547
+ offerStopsWriter.close();
548
+ offerJourneysWriter.close();
549
+ LOGGER.spacer(1);
550
+ LOGGER.success(`Total OfferJourneys written: ${totalOfferJourneysCounter}`);
551
+ LOGGER.success(`Total OfferStops written: ${totalOfferStopsCounter}`);
552
+ LOGGER.terminate(`Finished processing GTFS file. Run took ${globalTimer.get()}`);
553
+ //
554
+ }
555
+ catch (error) {
556
+ LOGGER.error('An error occurred. Halting execution.', error);
557
+ LOGGER.error('Retrying in 10 seconds...');
558
+ setTimeout(() => {
559
+ process.exit(0); // End process
560
+ }, 10000); // after 10 seconds
561
+ }
562
+ //
563
+ }
564
+ ;
565
+ /* * */
566
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
567
/**
 * Streams a CSV file from disk and awaits the given handler for every parsed record.
 * Records are produced as objects keyed by the header row; rows are trimmed and
 * empty lines skipped. Rows are processed strictly one at a time, in file order.
 *
 * @param filePath - Path of the CSV file to read
 * @param rowParser - Async callback invoked with each parsed record object
 */
async function parseCsvFile(filePath, rowParser) {
    const csvOptions = { bom: true, columns: true, record_delimiter: ['\n', '\r', '\r\n'], skip_empty_lines: true, trim: true };
    const records = fs.createReadStream(filePath).pipe(csvParser(csvOptions));
    for await (const record of records) {
        await rowParser(record);
    }
}
575
+ /* * */
576
/**
 * Extracts a zip archive into the given directory, then normalizes the
 * permissions of every extracted file.
 *
 * @param archivePath - Path of the zip archive to extract
 * @param destinationDir - Directory where the archive contents are placed
 */
const unzipFile = async (archivePath, destinationDir) => {
    await extract(archivePath, { dir: destinationDir });
    setDirectoryPermissions(destinationDir);
};
580
+ /* * */
581
/**
 * Recursively applies the given permission mode to every file under a directory.
 * Directories themselves are traversed but their own mode is left untouched.
 *
 * @param dirPath - Root directory to walk
 * @param mode - Permission bits applied to each file (default 0o666)
 */
const setDirectoryPermissions = (dirPath, mode = 0o666) => {
    for (const entry of fs.readdirSync(dirPath, { withFileTypes: true })) {
        const entryPath = `${dirPath}/${entry.name}`;
        if (entry.isDirectory()) {
            setDirectoryPermissions(entryPath, mode);
            continue;
        }
        fs.chmodSync(entryPath, mode);
    }
};
593
+ /* * */
594
/**
 * Normalizes a trip distance value to meters.
 *
 * GTFS feeds report shape_dist_traveled either in meters or in kilometers. The
 * unit is guessed from a ballpark reference value (it is recommended to use the
 * total distance of the trip): if the ballpark is greater than 800, the value is
 * assumed to already be in meters; otherwise it is treated as kilometers and
 * scaled by 1000. This works because a trip is unlikely to be shorter than
 * 800 meters or longer than 800 kilometers.
 *
 * @param value - The distance value to normalize (meters or kilometers)
 * @param ballpark - Reference distance used to guess the unit
 * @returns The value in meters, or -1 when either argument is not numeric
 */
const convertMetersOrKilometersToMeters = (value, ballpark) => {
    const valueAsNumber = Number(value);
    const ballparkAsNumber = Number(ballpark);
    // Non-numeric input (e.g. a missing shape_dist_traveled column) yields the
    // sentinel -1, matching the behavior callers already rely on.
    if (Number.isNaN(valueAsNumber) || Number.isNaN(ballparkAsNumber)) {
        return -1;
    }
    return ballparkAsNumber > 800 ? valueAsNumber : valueAsNumber * 1000;
};
624
+ /* * */
625
/**
 * Expands an operational date range into the list of individual operational dates.
 *
 * @param start - Range start (OperationalDate), inclusive
 * @param end - Range end (OperationalDate), inclusive
 * @returns Array of operational dates from start to end
 * @throws When the end date precedes the start date
 */
export function getIndividualDatesFromRange(start, end) {
    if (end < start) {
        throw new Error(`End date "${end}" must be after start date "${start}"`);
    }
    // Parse both bounds once, then walk forward one day at a time.
    const rangeEnd = Dates.fromOperationalDate(end, 'Europe/Lisbon');
    const dates = [];
    let cursor = Dates.fromOperationalDate(start, 'Europe/Lisbon');
    while (cursor.operational_date <= rangeEnd.operational_date) {
        dates.push(cursor.operational_date);
        cursor = cursor.plus({ days: 1 });
    }
    return dates;
}
package/dist/types.js ADDED
@@ -0,0 +1,2 @@
1
/* * */
// Placeholder module: the source file contains only type declarations, which are
// erased at compile time. The empty export keeps this file a valid ES module.
export {};
package/package.json ADDED
@@ -0,0 +1,42 @@
1
+ {
2
+ "name": "@tmlmobilidade/generate-offer-files",
3
+ "version": "20250625.1536.33",
4
+ "type": "module",
5
+ "files": [
6
+ "dist"
7
+ ],
8
+ "main": "dist/index.js",
9
"bin": {
"generate-offer-files": "./dist/index.js"
},
12
+ "types": "dist/index.d.ts",
13
+ "scripts": {
14
+ "build": "npm run clean && tsc && resolve-tspaths",
15
+ "clean": "rm -rf dist",
16
+ "dev": "tsx watch ./src/index.ts",
17
+ "lint": "eslint .",
18
+ "lint:fix": "eslint . --fix",
19
+ "start": "tsc && resolve-tspaths && node dist/index.js"
20
+ },
21
"dependencies": {
"@helperkits/logger": "20240703.1726.24",
"@helperkits/timer": "20240627.34.23",
"@helperkits/writer": "20250623.2358.3",
"@tmlmobilidade/interfaces": "*",
"@tmlmobilidade/lib": "*",
"@tmlmobilidade/types": "*",
"@tmlmobilidade/utils": "*",
"commander": "14.0.0",
"csv-parse": "5.6.0",
"extract-zip": "2.0.1",
"gtfs-types": "5.1.0"
},
"devDependencies": {
"@tmlmobilidade/tsconfig": "*",
"@types/luxon": "3.6.2",
"@types/node": "24.0.4",
"resolve-tspaths": "0.8.23",
"tsx": "4.20.3",
"typescript": "5.8.3"
}
42
+ }