@tmlmobilidade/import-gtfs 20251031.1051.3 → 20251202.1817.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +2 -2
- package/dist/index.js +2 -2
- package/dist/{src/main.js → main.js} +11 -10
- package/dist/{src/processors → processors}/calendar.js +8 -7
- package/dist/{src/processors → processors}/calendar_dates.js +7 -7
- package/dist/{src/processors → processors}/routes.js +6 -6
- package/dist/{src/processors → processors}/shapes.js +7 -7
- package/dist/{src/processors → processors}/stop_times.js +7 -7
- package/dist/{src/processors → processors}/stops.js +6 -6
- package/dist/{src/processors → processors}/trips.js +7 -7
- package/dist/{src/types.d.ts → types.d.ts} +8 -3
- package/dist/{src/utils → utils}/extract-file.js +8 -8
- package/dist/{src/utils → utils}/gtfs-time-string.js +1 -1
- package/dist/{src/utils → utils}/init-tables.js +2 -1
- package/package.json +19 -19
- /package/dist/{src/main.d.ts → main.d.ts} +0 -0
- /package/dist/{src/processors → processors}/calendar.d.ts +0 -0
- /package/dist/{src/processors → processors}/calendar_dates.d.ts +0 -0
- /package/dist/{src/processors → processors}/routes.d.ts +0 -0
- /package/dist/{src/processors → processors}/shapes.d.ts +0 -0
- /package/dist/{src/processors → processors}/stop_times.d.ts +0 -0
- /package/dist/{src/processors → processors}/stops.d.ts +0 -0
- /package/dist/{src/processors → processors}/trips.d.ts +0 -0
- /package/dist/{src/types.js → types.js} +0 -0
- /package/dist/{src/utils → utils}/extract-file.d.ts +0 -0
- /package/dist/{src/utils → utils}/gtfs-time-string.d.ts +0 -0
- /package/dist/{src/utils → utils}/init-tables.d.ts +0 -0
- /package/dist/{src/utils → utils}/parse-csv.d.ts +0 -0
- /package/dist/{src/utils → utils}/parse-csv.js +0 -0
- /package/dist/{src/utils → utils}/unzip-file.d.ts +0 -0
- /package/dist/{src/utils → utils}/unzip-file.js +0 -0
package/dist/index.d.ts
CHANGED
@@ -1,2 +1,2 @@
-export * from './
-export * from './
+export * from './main.js';
+export * from './types.js';

package/dist/index.js
CHANGED
@@ -1,2 +1,2 @@
-export * from './
-export * from './
+export * from './main.js';
+export * from './types.js';

package/dist/{src/main.js → main.js}
RENAMED
@@ -1,4 +1,9 @@
 /* * */
+import { downloadAndExtractGtfs } from './utils/extract-file.js';
+import { initGtfsSqlTables } from './utils/init-tables.js';
+import { Logger } from '@tmlmobilidade/logger';
+import { Timer } from '@tmlmobilidade/timer';
+/* * */
 import { processCalendarFile } from './processors/calendar.js';
 import { processCalendarDatesFile } from './processors/calendar_dates.js';
 import { processRoutesFile } from './processors/routes.js';
@@ -6,10 +11,6 @@ import { processShapesFile } from './processors/shapes.js';
 import { processStopTimesFile } from './processors/stop_times.js';
 import { processStopsFile } from './processors/stops.js';
 import { processTripsFile } from './processors/trips.js';
-import { downloadAndExtractGtfs } from './utils/extract-file.js';
-import { initGtfsSqlTables } from './utils/init-tables.js';
-import TIMETRACKER from '@helperkits/timer';
-import { Logs } from '@tmlmobilidade/utils';
 /**
  * Imports GTFS data into the database for a given plan.
  * @param plan The plan containing GTFS feed information.
@@ -19,8 +20,8 @@ import { Logs } from '@tmlmobilidade/utils';
 export async function importGtfsToDatabase(plan, config = {}) {
 try {
 //
-const globalTimer = new
-
+const globalTimer = new Timer();
+Logger.info(`Importing ${plan._id} GTFS to database...`);
 //
 // Initialize context for the current plan
 const context = {
@@ -52,14 +53,14 @@ export async function importGtfsToDatabase(plan, config = {}) {
 await processShapesFile(context);
 await processStopsFile(context);
 await processStopTimesFile(context);
-
-
-
+Logger.success(`Finished importing GTFS to database for plan "${plan._id}" in ${globalTimer.get()}.`, 0);
+Logger.divider();
+Logger.terminate(`Finished importing GTFS to database in ${globalTimer.get()}.`);
 return context.gtfs;
 //
 }
 catch (error) {
-
+Logger.error('Error parsing plan.', error);
 throw error;
 }
 }
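
The change above drops `@helperkits/timer` and `Logs` from `@tmlmobilidade/utils` in favour of the in-house `@tmlmobilidade/timer` and `@tmlmobilidade/logger` packages. A minimal sketch of the new pattern, using only the calls that actually appear in this diff (`new Timer()`, `timer.get()`, `Logger.info`, `Logger.success`, `Logger.error`); the `runStep` helper and its label are illustrative, not part of the package:

import { Logger } from '@tmlmobilidade/logger';
import { Timer } from '@tmlmobilidade/timer';

// Illustrative helper: time a step and log its outcome, mirroring the pattern
// used by importGtfsToDatabase and the processors in this release.
async function runStep(label: string, step: () => Promise<void>): Promise<void> {
    const timer = new Timer();
    Logger.info(`Starting "${label}"...`);
    try {
        await step();
        Logger.success(`Finished "${label}" in ${timer.get()}.`, 1);
    }
    catch (error) {
        Logger.error(`Error running "${label}".`, error);
        throw error;
    }
}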

package/dist/{src/processors → processors}/calendar.js
RENAMED
@@ -1,8 +1,9 @@
 /* * */
 import { parseCsvFile } from '../utils/parse-csv.js';
-import
+import { Dates, getOperationalDatesFromRange } from '@tmlmobilidade/dates';
+import { Logger } from '@tmlmobilidade/logger';
+import { Timer } from '@tmlmobilidade/timer';
 import { validateGtfsCalendar } from '@tmlmobilidade/types';
-import { Dates, getOperationalDatesFromRange, Logs } from '@tmlmobilidade/utils';
 import fs from 'node:fs';
 /**
  * Processes the calendar.txt file from the GTFS dataset.
@@ -15,8 +16,8 @@ import fs from 'node:fs';
 export async function processCalendarFile(context, config) {
 try {
 //
-const calendarParseTimer = new
-
+const calendarParseTimer = new Timer();
+Logger.info(`Reading zip entry "calendar.txt"...`);
 const parseEachRow = async (data) => {
 //
 //
@@ -84,15 +85,15 @@ export async function processCalendarFile(context, config) {
 // Setup the CSV parsing operation only if the file exists
 if (fs.existsSync(`${context.workdir.extract_dir_path}/calendar.txt`)) {
 await parseCsvFile(`${context.workdir.extract_dir_path}/calendar.txt`, parseEachRow);
-
+Logger.success(`Finished processing "calendar.txt": ${context.gtfs.calendar_dates.size} rows saved in ${calendarParseTimer.get()}.`, 1);
 }
 else {
-
+Logger.info(`Optional file "calendar.txt" not found. This may or may not be an error. Proceeding...`, 1);
 }
 //
 }
 catch (error) {
-
+Logger.error('Error processing "calendar.txt" file.', error);
 throw new Error('✖︎ Error processing "calendar.txt" file.');
 }
 }

package/dist/{src/processors → processors}/calendar_dates.js
RENAMED
@@ -1,8 +1,8 @@
 /* * */
 import { parseCsvFile } from '../utils/parse-csv.js';
-import
+import { Logger } from '@tmlmobilidade/logger';
+import { Timer } from '@tmlmobilidade/timer';
 import { validateGtfsCalendarDate } from '@tmlmobilidade/types';
-import { Logs } from '@tmlmobilidade/utils';
 import fs from 'node:fs';
 /**
  * Processes the calendar_dates.txt file from the GTFS dataset.
@@ -15,8 +15,8 @@ import fs from 'node:fs';
 export async function processCalendarDatesFile(context, config) {
 try {
 //
-const calendarDatesParseTimer = new
-
+const calendarDatesParseTimer = new Timer();
+Logger.info(`Reading zip entry "calendar_dates.txt"...`);
 const parseEachRow = async (data) => {
 //
 //
@@ -70,15 +70,15 @@ export async function processCalendarDatesFile(context, config) {
 // Setup the CSV parsing operation only if the file exists
 if (fs.existsSync(`${context.workdir.extract_dir_path}/calendar_dates.txt`)) {
 await parseCsvFile(`${context.workdir.extract_dir_path}/calendar_dates.txt`, parseEachRow);
-
+Logger.success(`Finished processing "calendar_dates.txt": ${Object.keys(context.gtfs.calendar_dates).length} rows saved in ${calendarDatesParseTimer.get()}.`, 1);
 }
 else {
-
+Logger.info(`Optional file "calendar_dates.txt" not found. This may or may not be an error. Proceeding...`, 1);
 }
 //
 }
 catch (error) {
-
+Logger.error('Error processing "calendar_dates.txt" file.', error);
 throw new Error('✖︎ Error processing "calendar_dates.txt" file.');
 }
 }
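
Both calendar processors above fill `context.gtfs.calendar_dates`, which `types.d.ts` further down declares as `Record<string, OperationalDate[]>` (a service_id mapped to its operational dates), and which the trips processor later uses to filter trips. A hedged sketch of that lookup; the `hasServiceDates` helper is illustrative and not part of the package:

import { type OperationalDate } from '@tmlmobilidade/types';

// service_id → operational dates, as declared in the GtfsSQLTables interface.
type CalendarDatesMap = Record<string, OperationalDate[]>;

// Illustrative check: a trip is worth keeping only if its service_id resolved
// to at least one operational date during calendar/calendar_dates processing.
function hasServiceDates(calendarDates: CalendarDatesMap, serviceId: string): boolean {
    const dates = calendarDates[serviceId];
    return Array.isArray(dates) && dates.length > 0;
}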

package/dist/{src/processors → processors}/routes.js
RENAMED
@@ -1,8 +1,8 @@
 /* * */
 import { parseCsvFile } from '../utils/parse-csv.js';
-import
+import { Logger } from '@tmlmobilidade/logger';
+import { Timer } from '@tmlmobilidade/timer';
 import { validateGtfsRouteExtended } from '@tmlmobilidade/types';
-import { Logs } from '@tmlmobilidade/utils';
 /**
  * Processes the routes.txt file from the GTFS dataset.
  * It filters routes based on the previously saved trips.
@@ -11,8 +11,8 @@ import { Logs } from '@tmlmobilidade/utils';
 export async function processRoutesFile(context) {
 try {
 //
-const routesParseTimer = new
-
+const routesParseTimer = new Timer();
+Logger.info(`Reading zip entry "routes.txt"...`);
 const parseEachRow = async (data) => {
 // Validate the current row against the proper type
 const validatedData = validateGtfsRouteExtended(data);
@@ -27,11 +27,11 @@ export async function processRoutesFile(context) {
 // Setup the CSV parsing operation
 await parseCsvFile(`${context.workdir.extract_dir_path}/routes.txt`, parseEachRow);
 context.gtfs.routes.flush();
-
+Logger.success(`Finished processing "routes.txt": ${context.gtfs.routes.size} rows saved in ${routesParseTimer.get()}.`, 1);
 //
 }
 catch (error) {
-
+Logger.error('Error processing "routes.txt" file.', error);
 throw new Error('✖︎ Error processing "routes.txt" file.');
 }
 }
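
The routes processor above and the shapes, stop_times, stops and trips processors below all share one structure: start a `Timer`, stream the file with `parseCsvFile`, validate each row, write it to the corresponding table, then `flush()` and log a summary. A condensed sketch of that shared shape, using only calls visible in these diffs; the generic `processGtfsFile` signature is an illustrative generalisation, not an export of the package:

import { parseCsvFile } from '../utils/parse-csv.js';
import { Logger } from '@tmlmobilidade/logger';
import { Timer } from '@tmlmobilidade/timer';

// Illustrative generalisation of the per-file processors in this package.
export async function processGtfsFile<T>(
    context: { workdir: { extract_dir_path: string } },
    filename: string,
    validate: (row: unknown) => T,
    table: { flush: () => void; size: number; write: (row: T) => void },
): Promise<void> {
    const parseTimer = new Timer();
    Logger.info(`Reading zip entry "${filename}"...`);
    const parseEachRow = async (data: unknown) => {
        // Validate the current row against the proper type, then buffer the write.
        const validatedData = validate(data);
        table.write(validatedData);
    };
    await parseCsvFile(`${context.workdir.extract_dir_path}/${filename}`, parseEachRow);
    table.flush();
    Logger.success(`Finished processing "${filename}": ${table.size} rows saved in ${parseTimer.get()}.`, 1);
}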

package/dist/{src/processors → processors}/shapes.js
RENAMED
@@ -1,8 +1,8 @@
 /* * */
 import { parseCsvFile } from '../utils/parse-csv.js';
-import
+import { Logger } from '@tmlmobilidade/logger';
+import { Timer } from '@tmlmobilidade/timer';
 import { validateGtfsShape } from '@tmlmobilidade/types';
-import { Logs } from '@tmlmobilidade/utils';
 /**
  * Processes the shapes.txt file from the GTFS dataset.
  * Include only the shapes referenced by the previously saved trips.
@@ -11,8 +11,8 @@ import { Logs } from '@tmlmobilidade/utils';
 export async function processShapesFile(context) {
 try {
 //
-const shapesParseTimer = new
-
+const shapesParseTimer = new Timer();
+Logger.info(`Reading zip entry "shapes.txt"...`);
 const parseEachRow = async (data) => {
 // Validate the current row against the proper type
 const validatedData = validateGtfsShape(data);
@@ -24,7 +24,7 @@ export async function processShapesFile(context) {
 context.gtfs.shapes.write(validatedData);
 // Log progress
 if (context.counters.shapes % 100000 === 0)
-
+Logger.info(`Parsed ${context.counters.shapes} shapes.txt rows so far.`);
 // Increment the counter
 context.counters.shapes++;
 };
@@ -32,11 +32,11 @@ export async function processShapesFile(context) {
 // Setup the CSV parsing operation
 await parseCsvFile(`${context.workdir.extract_dir_path}/shapes.txt`, parseEachRow);
 context.gtfs.shapes.flush();
-
+Logger.success(`Finished processing "shapes.txt": ${context.gtfs.shapes.size} rows saved in ${shapesParseTimer.get()}.`, 1);
 //
 }
 catch (error) {
-
+Logger.error('Error processing "shapes.txt" file.', error);
 throw new Error('✖︎ Error processing "shapes.txt" file.');
 }
 }

package/dist/{src/processors → processors}/stop_times.js
RENAMED
@@ -1,8 +1,8 @@
 /* * */
 import { parseCsvFile } from '../utils/parse-csv.js';
-import
+import { Logger } from '@tmlmobilidade/logger';
+import { Timer } from '@tmlmobilidade/timer';
 import { validateGtfsStopTime } from '@tmlmobilidade/types';
-import { Logs } from '@tmlmobilidade/utils';
 /**
  * Processes the stop_times.txt file from the GTFS dataset.
  * Only include the stop_times for trips referenced before.
@@ -13,8 +13,8 @@ import { Logs } from '@tmlmobilidade/utils';
 export async function processStopTimesFile(context) {
 try {
 //
-const stopTimesParseTimer = new
-
+const stopTimesParseTimer = new Timer();
+Logger.info(`Reading zip entry "stop_times.txt"...`);
 const parseEachRow = async (data) => {
 // Validate the current row against the proper type
 const validatedData = validateGtfsStopTime(data);
@@ -30,7 +30,7 @@ export async function processStopTimesFile(context) {
 context.gtfs.stop_times.write(validatedData);
 // Log progress
 if (context.counters.stop_times % 100000 === 0)
-
+Logger.info(`Parsed ${context.counters.stop_times} stop_times.txt rows so far.`);
 // Increment the counter
 context.counters.stop_times++;
 };
@@ -38,11 +38,11 @@ export async function processStopTimesFile(context) {
 // Setup the CSV parsing operation
 await parseCsvFile(`${context.workdir.extract_dir_path}/stop_times.txt`, parseEachRow);
 context.gtfs.stop_times.flush();
-
+Logger.success(`Finished processing "stop_times.txt": ${context.counters.stop_times} rows saved in ${stopTimesParseTimer.get()}.`, 1);
 //
 }
 catch (error) {
-
+Logger.error('Error processing "stop_times.txt" file.', error);
 throw new Error('✖︎ Error processing "stop_times.txt" file.');
 }
 }

package/dist/{src/processors → processors}/stops.js
RENAMED
@@ -1,8 +1,8 @@
 /* * */
 import { parseCsvFile } from '../utils/parse-csv.js';
-import TIMETRACKER from '@helperkits/timer';
 import { validateGtfsStopExtended } from '@tmlmobilidade/types';
-import {
+import { Logger } from '@tmlmobilidade/logger';
+import { Timer } from '@tmlmobilidade/timer';
 /**
  * Processes the stops.txt file from the GTFS dataset.
  * include all of them since we don't have a way to filter them yet like trips/routes/shapes.
@@ -12,8 +12,8 @@ import { Logs } from '@tmlmobilidade/utils';
 export async function processStopsFile(context) {
 try {
 //
-const stopsParseTimer = new
-
+const stopsParseTimer = new Timer();
+Logger.info(`Reading zip entry "stops.txt"...`);
 const parseEachRow = async (data) => {
 // Validate the current row against the proper type
 const validatedData = validateGtfsStopExtended(data);
@@ -27,11 +27,11 @@ export async function processStopsFile(context) {
 // Setup the CSV parsing operation
 await parseCsvFile(`${context.workdir.extract_dir_path}/stops.txt`, parseEachRow);
 context.gtfs.stops.flush();
-
+Logger.success(`Finished processing "stops.txt": ${context.gtfs.stops.size} rows saved in ${stopsParseTimer.get()}.`, 1);
 //
 }
 catch (error) {
-
+Logger.error('Error processing "stops.txt" file.', error);
 throw new Error('✖︎ Error processing "stops.txt" file.');
 }
 }

package/dist/{src/processors → processors}/trips.js
RENAMED
@@ -1,8 +1,8 @@
 /* * */
 import { parseCsvFile } from '../utils/parse-csv.js';
-import TIMETRACKER from '@helperkits/timer';
 import { validateGtfsTripExtended } from '@tmlmobilidade/types';
-import {
+import { Logger } from '@tmlmobilidade/logger';
+import { Timer } from '@tmlmobilidade/timer';
 /**
  * Processes the trips.txt file from the GTFS dataset.
  * It filters trips based on the previously saved calendar dates.
@@ -11,8 +11,8 @@ import { Logs } from '@tmlmobilidade/utils';
 export async function processTripsFile(context) {
 try {
 //
-const tripsParseTimer = new
-
+const tripsParseTimer = new Timer();
+Logger.info(`Reading zip entry "trips.txt"...`);
 const parseEachRow = async (data) => {
 // Validate the current row against the proper type
 const validatedData = validateGtfsTripExtended(data);
@@ -27,7 +27,7 @@ export async function processTripsFile(context) {
 context.referenced_shape_ids.add(validatedData.shape_id);
 // Log progress
 if (context.counters.trips % 10000 === 0)
-
+Logger.info(`Parsed ${context.counters.trips} trips.txt rows so far.`);
 // Increment the counter
 context.counters.trips++;
 };
@@ -35,11 +35,11 @@ export async function processTripsFile(context) {
 // Setup the CSV parsing operation
 await parseCsvFile(`${context.workdir.extract_dir_path}/trips.txt`, parseEachRow);
 context.gtfs.trips.flush();
-
+Logger.success(`Finished processing "trips.txt": ${context.gtfs.trips.size} rows saved in ${tripsParseTimer.get()}.`, 1);
 //
 }
 catch (error) {
-
+Logger.error('Error processing "trips.txt" file.', error);
 throw new Error('✖︎ Error processing "trips.txt" file.');
 }
 }

package/dist/{src/types.d.ts → types.d.ts}
RENAMED
@@ -1,4 +1,4 @@
-import { type SQLiteTableInstance } from '@tmlmobilidade/
+import { type SQLiteDatabase, type SQLiteTableInstance } from '@tmlmobilidade/sqlite';
 import { type GTFS_Route_Extended, type GTFS_Shape, type GTFS_Stop_Extended, type GTFS_StopTime, type GTFS_Trip_Extended, type Plan } from '@tmlmobilidade/types';
 import { type OperationalDate } from '@tmlmobilidade/types';
 /**
@@ -12,11 +12,16 @@ export interface ImportGtfsToDatabaseConfig {
 discrete_dates?: OperationalDate[];
 }
 /**
- * Holds references to all GTFS-related SQL tables
+ * Holds references to all GTFS-related SQL tables.
  * Each property corresponds to a specific GTFS entity and is associated
- * with a
+ * with a `SQLiteTableInstance` instance for that entity. This structure
+ * allows for organized access and manipulation of GTFS data within the database,
+ * as well as batching operations through the underlying SQLite database connection.
+ * The `_db` property provides access to the raw SQLite database instance
+ * that can be used for executing custom queries or transactions.
  */
 export interface GtfsSQLTables {
+_db: SQLiteDatabase['databaseInstance'];
 calendar_dates: Record<string, OperationalDate[]>;
 routes: SQLiteTableInstance<GTFS_Route_Extended>;
 shapes: SQLiteTableInstance<GTFS_Shape>;
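
The new `_db` property exposes the raw handle typed as `SQLiteDatabase['databaseInstance']`, and `importGtfsToDatabase` returns this whole object (see `return context.gtfs;` in main.js above). A hedged usage sketch; the `prepare(...).get()` call assumes a better-sqlite3-style driver and a `routes` table name, neither of which this diff confirms:

import { importGtfsToDatabase } from '@tmlmobilidade/import-gtfs';
import { type Plan } from '@tmlmobilidade/types';

// Illustrative: run a custom query against the freshly imported data.
async function countImportedRoutes(plan: Plan): Promise<unknown> {
    const gtfs = await importGtfsToDatabase(plan);
    // ASSUMPTION: the underlying instance exposes prepare()/get() (better-sqlite3
    // style) and the routes table is named "routes"; adjust to the real driver API.
    const db = gtfs._db as unknown as { prepare: (sql: string) => { get: () => unknown } };
    return db.prepare('SELECT COUNT(*) AS total FROM routes').get();
}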

package/dist/{src/utils → utils}/extract-file.js
RENAMED
@@ -1,7 +1,7 @@
 /* * */
 import { unzipFile } from './unzip-file.js';
 import { files } from '@tmlmobilidade/interfaces';
-import {
+import { Logger } from '@tmlmobilidade/logger';
 import fs from 'node:fs';
 /**
  * Downloads and extracts the GTFS files for the given plan.
@@ -12,7 +12,7 @@ export async function downloadAndExtractGtfs(plan) {
 //
 // Return early if no operation file is found
 if (!plan.operation_file_id) {
-
+Logger.error(`No operation file found for plan "${plan._id}".`);
 process.exit(1);
 }
 //
@@ -23,10 +23,10 @@ export async function downloadAndExtractGtfs(plan) {
 try {
 fs.rmSync(workdirPath, { force: true, recursive: true });
 fs.mkdirSync(workdirPath, { recursive: true });
-
+Logger.success('Prepared working directory.', 1);
 }
 catch (error) {
-
+Logger.error(`Error preparing workdir path "${workdirPath}".`, error);
 process.exit(1);
 }
 //
@@ -34,7 +34,7 @@ export async function downloadAndExtractGtfs(plan) {
 // and try to download, save and unzip it.
 const operationFileData = await files.findById(plan.operation_file_id);
 if (!operationFileData || !operationFileData.url) {
-
+Logger.error(`No operation file found for plan "${plan._id}".`);
 process.exit(1);
 }
 try {
@@ -43,15 +43,15 @@ export async function downloadAndExtractGtfs(plan) {
 fs.writeFileSync(downloadFilePath, Buffer.from(downloadArrayBuffer));
 }
 catch (error) {
-
+Logger.error('Error downloading the file.', error);
 process.exit(1);
 }
 try {
 await unzipFile(downloadFilePath, extractDirPath);
-
+Logger.success(`Unzipped GTFS file from "${downloadFilePath}" to "${extractDirPath}".`, 1);
 }
 catch (error) {
-
+Logger.error('Error unzipping the file.', error);
 process.exit(1);
 }
 return {
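
The hunks above keep the download-then-unzip flow and only swap the logging calls. A condensed sketch of that flow, reusing the calls visible in the diff (`fs.writeFileSync`, `Buffer.from`, `unzipFile`, `Logger.success`); the `fetch` step and the path parameters are assumptions, since the surrounding lines are not part of this diff:

import { unzipFile } from './unzip-file.js';
import { Logger } from '@tmlmobilidade/logger';
import fs from 'node:fs';

// Illustrative condensation of the happy path in downloadAndExtractGtfs.
async function downloadAndUnzip(url: string, downloadFilePath: string, extractDirPath: string): Promise<void> {
    // Download step: fetch() is assumed here; the diff only shows the write below.
    const downloadArrayBuffer = await (await fetch(url)).arrayBuffer();
    fs.writeFileSync(downloadFilePath, Buffer.from(downloadArrayBuffer));
    // Unzip into the extraction directory and report progress.
    await unzipFile(downloadFilePath, extractDirPath);
    Logger.success(`Unzipped GTFS file from "${downloadFilePath}" to "${extractDirPath}".`, 1);
}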

package/dist/{src/utils → utils}/init-tables.js
RENAMED
@@ -1,5 +1,5 @@
 /* * */
-import { SQLiteDatabase } from '@tmlmobilidade/
+import { SQLiteDatabase } from '@tmlmobilidade/sqlite';
 /**
  * Initializes GTFS SQL tables and writers.
  * @returns An object containing initialized GTFS SQL tables and writers.
@@ -110,6 +110,7 @@ export function initGtfsSqlTables() {
 ],
 });
 return {
+_db: database.databaseInstance,
 calendar_dates: calendarDatesMap,
 routes: routesTable,
 shapes: shapesTable,

package/package.json
CHANGED
@@ -1,7 +1,10 @@
 {
 "name": "@tmlmobilidade/import-gtfs",
-"version": "
-"author":
+"version": "20251202.1817.5",
+"author": {
+"email": "iso@tmlmobilidade.pt",
+"name": "TML-ISO"
+},
 "license": "AGPL-3.0-or-later",
 "homepage": "https://github.com/tmlmobilidade/services#readme",
 "bugs": {
@@ -22,33 +25,30 @@
 },
 "type": "module",
 "files": [
-"dist
+"dist"
 ],
-"
-
-"types": "./dist/index.d.ts",
-"default": "./dist/index.js"
-}
-},
+"main": "./dist/index.js",
+"types": "./dist/index.d.ts",
 "scripts": {
-"build": "
-"
-"
-"
+"build": "tsc && resolve-tspaths",
+"lint": "eslint ./src/ && tsc --noEmit",
+"lint:fix": "eslint ./src/ --fix",
+"watch": "tsc-watch --onSuccess 'resolve-tspaths'"
 },
 "dependencies": {
-"@tmlmobilidade/connectors": "*",
 "@tmlmobilidade/interfaces": "*",
-"@tmlmobilidade/
+"@tmlmobilidade/logger": "*",
+"@tmlmobilidade/sqlite": "*",
+"@tmlmobilidade/timer": "*",
+"csv-parse": "6.1.0",
+"extract-zip": "2.0.1"
 },
 "devDependencies": {
-"@carrismetropolitana/eslint": "20250622.1204.50",
-"@tmlmobilidade/lib": "*",
 "@tmlmobilidade/tsconfig": "*",
 "@tmlmobilidade/types": "*",
-"@types/node": "24.
+"@types/node": "24.10.1",
 "resolve-tspaths": "0.8.23",
-"
+"tsc-watch": "7.2.0",
 "typescript": "5.9.3"
 }
 }
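
With `main` and `types` now pointing at `./dist/index.js` and `./dist/index.d.ts`, consumers import straight from the package root. A minimal usage sketch; the plan fields shown (`_id`, `operation_file_id`) appear elsewhere in this diff, but the values are placeholders and the real `Plan` shape comes from `@tmlmobilidade/types`:

import { importGtfsToDatabase, type ImportGtfsToDatabaseConfig } from '@tmlmobilidade/import-gtfs';
import { type Plan } from '@tmlmobilidade/types';

// Placeholder plan: only _id and operation_file_id are referenced in this diff.
const plan = { _id: 'plan-example', operation_file_id: 'file-example' } as unknown as Plan;

// Optional config: discrete_dates is declared in the updated types.d.ts above.
const config: ImportGtfsToDatabaseConfig = {};

const gtfs = await importGtfsToDatabase(plan, config);
// Quick check of the imported calendar map (a plain Record keyed by service_id).
console.log(`${Object.keys(gtfs.calendar_dates).length} services with operational dates.`);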

package/dist/{src/main.d.ts → main.d.ts}
File without changes

package/dist/{src/processors → processors}/calendar.d.ts
File without changes

package/dist/{src/processors → processors}/calendar_dates.d.ts
File without changes

package/dist/{src/processors → processors}/routes.d.ts
File without changes

package/dist/{src/processors → processors}/shapes.d.ts
File without changes

package/dist/{src/processors → processors}/stop_times.d.ts
File without changes

package/dist/{src/processors → processors}/stops.d.ts
File without changes

package/dist/{src/processors → processors}/trips.d.ts
File without changes

package/dist/{src/types.js → types.js}
File without changes

package/dist/{src/utils → utils}/extract-file.d.ts
File without changes

package/dist/{src/utils → utils}/gtfs-time-string.d.ts
File without changes

package/dist/{src/utils → utils}/init-tables.d.ts
File without changes

package/dist/{src/utils → utils}/parse-csv.d.ts
File without changes

package/dist/{src/utils → utils}/parse-csv.js
File without changes

package/dist/{src/utils → utils}/unzip-file.d.ts
File without changes

package/dist/{src/utils → utils}/unzip-file.js
File without changes