@tmlmobilidade/writers 20251202.1817.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/csv.d.ts ADDED
@@ -0,0 +1,18 @@
+ interface CsvWriterOptions {
+     batch_size?: number;
+     include_bom?: boolean;
+     new_line_character?: string;
+ }
+ export declare class CsvWriter<T> {
+     private CURRENT_BATCH_DATA;
+     private FILE_PATH;
+     private INCLUDE_BOM;
+     private INSTANCE_NAME;
+     private MAX_BATCH_SIZE;
+     private NEW_LINE_CHARACTER;
+     private SESSION_TIMER;
+     constructor(instanceName: string, filePath: string, options?: CsvWriterOptions);
+     flush(): Promise<void>;
+     write(data: T | T[]): Promise<void>;
+ }
+ export {};
package/dist/csv.js ADDED
@@ -0,0 +1,90 @@
+ /* * */
+ import { Logger } from '@tmlmobilidade/logger';
+ import { Timer } from '@tmlmobilidade/timer';
+ import fs from 'node:fs';
+ import Papa from 'papaparse';
+ /* * */
+ export class CsvWriter {
+     //
+     CURRENT_BATCH_DATA = [];
+     FILE_PATH = null;
+     INCLUDE_BOM = false;
+     INSTANCE_NAME = 'Unnamed Instance';
+     MAX_BATCH_SIZE = 5000;
+     NEW_LINE_CHARACTER = '\n';
+     SESSION_TIMER = new Timer();
+     /* * */
+     constructor(instanceName, filePath, options) {
+         this.INSTANCE_NAME = instanceName;
+         this.FILE_PATH = filePath;
+         if (options?.batch_size)
+             this.MAX_BATCH_SIZE = options.batch_size;
+         if (options?.new_line_character)
+             this.NEW_LINE_CHARACTER = options.new_line_character;
+         if (options?.include_bom)
+             this.INCLUDE_BOM = options.include_bom;
+     }
+     /* * */
+     async flush() {
+         try {
+             //
+             if (!this.FILE_PATH)
+                 return;
+             const flushTimer = new Timer();
+             const sessionTimerResult = this.SESSION_TIMER.get();
+             if (this.CURRENT_BATCH_DATA.length === 0)
+                 return;
+             //
+             // Keep track of whether the file already exists
+             const fileAlreadyExists = fs.existsSync(this.FILE_PATH);
+             //
+             // Use papaparse to produce the CSV string
+             let csvData = Papa.unparse(this.CURRENT_BATCH_DATA, { header: !fileAlreadyExists, newline: this.NEW_LINE_CHARACTER, skipEmptyLines: 'greedy' });
+             //
+             // Prepend BOM if this is the first write and BOM is enabled
+             if (!fileAlreadyExists && this.INCLUDE_BOM) {
+                 csvData = '\uFEFF' + csvData;
+             }
+             //
+             // Prepend a new line character to the csvData string
+             // if this is not the first line in the file.
+             if (fileAlreadyExists) {
+                 csvData = this.NEW_LINE_CHARACTER + csvData;
+             }
+             //
+             // Recursively ensure that the directory
+             // for the file path exists.
+             const dirPath = this.FILE_PATH.substring(0, this.FILE_PATH.lastIndexOf('/'));
+             if (!fs.existsSync(dirPath))
+                 fs.mkdirSync(dirPath, { recursive: true });
+             //
+             // Append the CSV string to the file
+             fs.appendFileSync(this.FILE_PATH, csvData, { encoding: 'utf-8', flush: true });
+             Logger.info(`CSVWRITER [${this.INSTANCE_NAME}]: Flush | Length: ${this.CURRENT_BATCH_DATA.length} | File Path: ${this.FILE_PATH} (session: ${sessionTimerResult}) (flush: ${flushTimer.get()})`);
+             this.CURRENT_BATCH_DATA = [];
+             //
+         }
+         catch (error) {
+             throw new Error(`Error at flush(): ${error.message}`);
+         }
+     }
+     /* * */
+     async write(data) {
+         //
+         //
+         // Check if the batch is full
+         if (this.CURRENT_BATCH_DATA.length >= this.MAX_BATCH_SIZE)
+             await this.flush();
+         //
+         // Reset the timer
+         if (this.CURRENT_BATCH_DATA.length === 0)
+             this.SESSION_TIMER.reset();
+         //
+         // Add the data to the batch
+         if (Array.isArray(data))
+             this.CURRENT_BATCH_DATA.push(...data);
+         else
+             this.CURRENT_BATCH_DATA.push(data);
+         //
+     }
+ }
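
A minimal usage sketch for CsvWriter, based only on the declarations and implementation above; the instance name, output path, and record shape are illustrative placeholders:

import { CsvWriter } from '@tmlmobilidade/writers';

// Rows are buffered in memory and appended to the CSV file in batches
// of `batch_size` (default 5000); the header row is written on the first flush.
const vehicleEventsCsv = new CsvWriter<{ line_id: string; vehicle_id: string }>(
    'vehicle-events',
    '/tmp/exports/vehicle-events.csv',
    { batch_size: 1000, include_bom: true },
);

await vehicleEventsCsv.write({ line_id: '1234', vehicle_id: '42|100' });
await vehicleEventsCsv.write([{ line_id: '1234', vehicle_id: '42|101' }, { line_id: '2710', vehicle_id: '41|200' }]);

// write() only flushes when a batch fills up, so flush explicitly at the end
// to persist whatever is still buffered.
await vehicleEventsCsv.flush();
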
package/dist/index.d.ts ADDED
@@ -0,0 +1,4 @@
+ export * from './csv.js';
+ export * from './json.js';
+ export * from './mongo.js';
+ export * from './postgres.js';
package/dist/index.js ADDED
@@ -0,0 +1,4 @@
+ export * from './csv.js';
+ export * from './json.js';
+ export * from './mongo.js';
+ export * from './postgres.js';
package/dist/json.d.ts ADDED
@@ -0,0 +1,19 @@
+ interface JsonWriterOptions {
+     add_after?: string;
+     add_before?: string;
+     batch_size?: number;
+ }
+ export declare class JsonWriter<T> {
+     private readonly ADD_AFTER;
+     private readonly ADD_BEFORE;
+     private CURRENT_BATCH_DATA;
+     private FILE_PATH;
+     private INSTANCE_NAME;
+     private MAX_BATCH_SIZE;
+     private SESSION_TIMER;
+     constructor(instanceName: string, filePath: string, options?: JsonWriterOptions);
+     close(): void;
+     flush(): void;
+     write(data: T | T[]): void;
+ }
+ export {};
package/dist/json.js ADDED
@@ -0,0 +1,86 @@
+ /* * */
+ import { Logger } from '@tmlmobilidade/logger';
+ import { Timer } from '@tmlmobilidade/timer';
+ import fs from 'node:fs';
+ /* * */
+ export class JsonWriter {
+     //
+     ADD_AFTER = null;
+     ADD_BEFORE = null;
+     CURRENT_BATCH_DATA = [];
+     FILE_PATH = null;
+     INSTANCE_NAME = 'Unnamed Instance';
+     MAX_BATCH_SIZE = 5000;
+     SESSION_TIMER = new Timer();
+     /* * */
+     constructor(instanceName, filePath, options) {
+         this.INSTANCE_NAME = instanceName;
+         this.FILE_PATH = filePath;
+         if (options?.add_after)
+             this.ADD_AFTER = options.add_after;
+         if (options?.add_before)
+             this.ADD_BEFORE = options.add_before;
+         if (options?.batch_size)
+             this.MAX_BATCH_SIZE = options.batch_size;
+     }
+     /* * */
+     close() {
+         this.flush();
+         let writableData = ']';
+         if (this.ADD_AFTER)
+             writableData = writableData + this.ADD_AFTER;
+         fs.appendFileSync(this.FILE_PATH, writableData);
+     }
+     /* * */
+     flush() {
+         //
+         if (!this.FILE_PATH)
+             throw new Error('File path is not set. Please provide a valid file path.');
+         const flushTimer = new Timer();
+         const sessionTimerResult = this.SESSION_TIMER.get();
+         if (this.CURRENT_BATCH_DATA.length === 0)
+             return;
+         // Keep track of whether the file already exists
+         const fileAlreadyExists = fs.existsSync(this.FILE_PATH);
+         // Serialize the current batch into a JSON string
+         let writableData = JSON.stringify(this.CURRENT_BATCH_DATA);
+         // Remove the first and last characters if they are brackets
+         if (writableData.startsWith('['))
+             writableData = writableData.slice(1);
+         if (writableData.endsWith(']'))
+             writableData = writableData.slice(0, -1);
+         // If the file does not yet exist, this is the first flush,
+         // which means we need to add an opening bracket. If the file already exists,
+         // we need to add a comma before the data.
+         if (!fileAlreadyExists)
+             writableData = '[' + writableData;
+         else
+             writableData = ',' + writableData;
+         // If the file does not exist, prepend the value of ADD_BEFORE, if it is set.
+         if (!fileAlreadyExists && this.ADD_BEFORE)
+             writableData = this.ADD_BEFORE + writableData;
+         // Append the JSON string to the file
+         fs.appendFileSync(this.FILE_PATH, writableData);
+         Logger.info(`JSONWRITER [${this.INSTANCE_NAME}]: Flush | Length: ${this.CURRENT_BATCH_DATA.length} | File Path: ${this.FILE_PATH} (session: ${sessionTimerResult}) (flush: ${flushTimer.get()})`);
+         this.CURRENT_BATCH_DATA = [];
+     }
+     /* * */
+     write(data) {
+         // Check if the batch is full
+         if (this.CURRENT_BATCH_DATA.length >= this.MAX_BATCH_SIZE) {
+             this.flush();
+         }
+         // Reset the timer
+         if (this.CURRENT_BATCH_DATA.length === 0) {
+             this.SESSION_TIMER.reset();
+         }
+         // Add the data to the batch
+         if (Array.isArray(data)) {
+             this.CURRENT_BATCH_DATA = [...this.CURRENT_BATCH_DATA, ...data];
+         }
+         else {
+             this.CURRENT_BATCH_DATA.push(data);
+         }
+         //
+     }
+ }
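
A minimal usage sketch for JsonWriter, again based only on the code above; the file path, envelope strings, and record shape are placeholders. Note that flush() grows the JSON array incrementally and close() is what terminates it:

import { JsonWriter } from '@tmlmobilidade/writers';

// Items are written as one growing JSON array; add_before / add_after wrap
// the array, which is handy for producing an envelope such as {"stops":[...]}.
const stopsJson = new JsonWriter<{ id: string; name: string }>(
    'stops-export',
    '/tmp/exports/stops.json',
    { add_before: '{"stops":', add_after: '}', batch_size: 2000 },
);

stopsJson.write({ id: '010101', name: 'Example Stop A' });
stopsJson.write([{ id: '010102', name: 'Example Stop B' }]);

// close() flushes any remaining items and appends the closing bracket
// (plus add_after), so it must be called exactly once at the end.
stopsJson.close();
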
package/dist/mongo.d.ts ADDED
@@ -0,0 +1,62 @@
+ interface MongoDbWriterParams {
+     /**
+      * The maximum number of items to hold in memory
+      * before flushing to the database.
+      * @default 3000
+      */
+     batch_size?: number;
+     /**
+      * How long, in milliseconds, data should be kept in memory before
+      * flushing to the database. If this feature is enabled, a flush will
+      * be triggered even if the batch is not full. Disabled by default.
+      * @default disabled
+      */
+     batch_timeout?: number;
+     /**
+      * The MongoDB collection to write to.
+      * @required
+      */
+     collection: any;
+     /**
+      * How long to wait, in milliseconds, after the last write operation
+      * before flushing the data to the database. This can be used to prevent
+      * items staying in memory for too long if the batch size is not reached
+      * frequently enough. Disabled by default.
+      * @default disabled
+      */
+     idle_timeout?: number;
+ }
+ export interface MongoDbWriterWriteOptions {
+     filter?: object;
+     upsert?: boolean;
+     write_mode?: 'replace' | 'update';
+ }
+ export interface MongoDBWriterWriteOps<T> {
+     data: T;
+     options: MongoDbWriterWriteOptions;
+ }
+ export declare class MongoDbWriter<T> {
+     private BATCH_SIZE;
+     private BATCH_TIMEOUT_ENABLED;
+     private BATCH_TIMEOUT_TIMER;
+     private BATCH_TIMEOUT_VALUE;
+     private DATA_BUCKET_ALWAYS_AVAILABLE;
+     private DATA_BUCKET_FLUSH_OPS;
+     private DB_COLLECTION;
+     private IDLE_TIMEOUT_ENABLED;
+     private IDLE_TIMEOUT_TIMER;
+     private IDLE_TIMEOUT_VALUE;
+     private SESSION_TIMER;
+     constructor(params: MongoDbWriterParams);
+     flush(callback?: (data?: MongoDBWriterWriteOps<T>[]) => Promise<void>): Promise<void>;
+     /**
+      * Write data to the MongoDB collection.
+      *
+      * @param data The data to write
+      * @param options Options for the write operation
+      * @param writeCallback Callback function to call after the write operation is complete
+      * @param flushCallback Callback function to call after the flush operation is complete
+      */
+     write(data: T | T[], options?: MongoDbWriterWriteOptions, writeCallback?: () => Promise<void>, flushCallback?: (data?: MongoDBWriterWriteOps<T>[]) => Promise<void>): Promise<void>;
+ }
+ export {};
package/dist/mongo.js ADDED
@@ -0,0 +1,174 @@
+ /* eslint-disable @typescript-eslint/no-explicit-any */
+ /* * */
+ import { Logger } from '@tmlmobilidade/logger';
+ import { Timer } from '@tmlmobilidade/timer';
+ /* * */
+ export class MongoDbWriter {
+     //
+     BATCH_SIZE = 3000;
+     BATCH_TIMEOUT_ENABLED = false;
+     BATCH_TIMEOUT_TIMER = null;
+     BATCH_TIMEOUT_VALUE = -1;
+     DATA_BUCKET_ALWAYS_AVAILABLE = [];
+     DATA_BUCKET_FLUSH_OPS = [];
+     DB_COLLECTION = null;
+     IDLE_TIMEOUT_ENABLED = false;
+     IDLE_TIMEOUT_TIMER = null;
+     IDLE_TIMEOUT_VALUE = -1;
+     SESSION_TIMER = new Timer();
+     /* * */
+     constructor(params) {
+         // Ensure that the MongoDB Collection is provided
+         if (!params.collection)
+             throw new Error('MONGODBWRITER: Collection is required');
+         this.DB_COLLECTION = params.collection;
+         // Set up the optional idle timeout functionality
+         if (params.idle_timeout > 0) {
+             this.IDLE_TIMEOUT_ENABLED = true;
+             this.IDLE_TIMEOUT_VALUE = params.idle_timeout;
+         }
+         // Override the default batch size
+         if (params.batch_size > 0) {
+             this.BATCH_SIZE = params.batch_size;
+         }
+         // Set up the optional batch timeout functionality
+         if (params.batch_timeout > 0) {
+             this.BATCH_TIMEOUT_ENABLED = true;
+             this.BATCH_TIMEOUT_VALUE = params.batch_timeout;
+         }
+     }
+     /* * */
+     async flush(callback) {
+         try {
+             //
+             const flushTimer = new Timer();
+             const sessionTimerResult = this.SESSION_TIMER.get();
+             //
+             // Invalidate all timers since a flush operation is being performed
+             if (this.IDLE_TIMEOUT_TIMER) {
+                 clearTimeout(this.IDLE_TIMEOUT_TIMER);
+                 this.IDLE_TIMEOUT_TIMER = null;
+             }
+             if (this.BATCH_TIMEOUT_TIMER) {
+                 clearTimeout(this.BATCH_TIMEOUT_TIMER);
+                 this.BATCH_TIMEOUT_TIMER = null;
+             }
+             //
+             // Skip if there is no data to flush
+             if (this.DATA_BUCKET_ALWAYS_AVAILABLE.length === 0)
+                 return;
+             //
+             // Copy everything in DATA_BUCKET_ALWAYS_AVAILABLE to DATA_BUCKET_FLUSH_OPS
+             // to prevent any new incoming data from being added to the batch. This ensures
+             // that the batch is not modified while it is being processed.
+             this.DATA_BUCKET_FLUSH_OPS = [...this.DATA_BUCKET_FLUSH_OPS, ...this.DATA_BUCKET_ALWAYS_AVAILABLE];
+             this.DATA_BUCKET_ALWAYS_AVAILABLE = [];
+             //
+             // Process the data into MongoDB insert/update operations
+             try {
+                 const writeOperations = this.DATA_BUCKET_FLUSH_OPS.map((item) => {
+                     switch (item.options?.write_mode) {
+                         case 'update':
+                             return {
+                                 updateOne: {
+                                     filter: item.options.filter,
+                                     update: item.data,
+                                     upsert: true,
+                                 },
+                             };
+                         case 'replace':
+                         default:
+                             return {
+                                 replaceOne: {
+                                     filter: item.options.filter,
+                                     replacement: item.data,
+                                     upsert: item.options?.upsert ? true : false,
+                                 },
+                             };
+                     }
+                 });
+                 await this.DB_COLLECTION.bulkWrite(writeOperations);
+                 Logger.info(`MONGODBWRITER [${this.DB_COLLECTION.collectionName}]: Flush | Length: ${this.DATA_BUCKET_FLUSH_OPS.length} (session: ${sessionTimerResult}) (flush: ${flushTimer.get()})`);
+                 //
+                 // Call the flush callback, if provided
+                 if (callback) {
+                     await callback(this.DATA_BUCKET_FLUSH_OPS);
+                 }
+                 //
+                 // Reset the flush bucket
+                 this.DATA_BUCKET_FLUSH_OPS = [];
+                 //
+             }
+             catch (error) {
+                 Logger.error(`MONGODBWRITER [${this.DB_COLLECTION.collectionName}]: Error @ flush().writeOperations(): ${error.message}`);
+             }
+             //
+         }
+         catch (error) {
+             Logger.error(`MONGODBWRITER [${this.DB_COLLECTION.collectionName}]: Error @ flush(): ${error.message}`);
+         }
+     }
+     /* * */
+     /**
+      * Write data to the MongoDB collection.
+      *
+      * @param data The data to write
+      * @param options Options for the write operation
+      * @param writeCallback Callback function to call after the write operation is complete
+      * @param flushCallback Callback function to call after the flush operation is complete
+      */
+     async write(data, options = {}, writeCallback, flushCallback) {
+         //
+         //
+         // Invalidate the previously set idle timeout timer
+         // since we are performing a write operation again.
+         if (this.IDLE_TIMEOUT_TIMER) {
+             clearTimeout(this.IDLE_TIMEOUT_TIMER);
+             this.IDLE_TIMEOUT_TIMER = null;
+         }
+         //
+         // Check if the batch is full
+         if (this.DATA_BUCKET_ALWAYS_AVAILABLE.length >= this.BATCH_SIZE) {
+             Logger.info(`MONGODBWRITER [${this.DB_COLLECTION.collectionName}]: Batch full. Flushing data...`);
+             await this.flush(flushCallback);
+         }
+         //
+         // Reset the session timer (for logging purposes)
+         if (this.DATA_BUCKET_ALWAYS_AVAILABLE.length === 0) {
+             this.SESSION_TIMER.reset();
+         }
+         //
+         // Add the current data to the batch
+         if (Array.isArray(data)) {
+             const combinedDataWithOptions = data.map(item => ({ data: item, options: options }));
+             this.DATA_BUCKET_ALWAYS_AVAILABLE = [...this.DATA_BUCKET_ALWAYS_AVAILABLE, ...combinedDataWithOptions];
+         }
+         else {
+             this.DATA_BUCKET_ALWAYS_AVAILABLE.push({ data: data, options: options });
+         }
+         //
+         // Call the write callback, if provided
+         if (writeCallback) {
+             await writeCallback();
+         }
+         //
+         // Set up the idle timeout timer to flush the data if too long has passed
+         // since the last write operation. Check if this functionality is enabled.
+         if (this.IDLE_TIMEOUT_ENABLED && !this.IDLE_TIMEOUT_TIMER) {
+             this.IDLE_TIMEOUT_TIMER = setTimeout(async () => {
+                 Logger.info(`MONGODBWRITER [${this.DB_COLLECTION.collectionName}]: Idle timeout reached. Flushing data...`);
+                 await this.flush(flushCallback);
+             }, this.IDLE_TIMEOUT_VALUE);
+         }
+         //
+         // Set up the batch timeout timer to flush the data, if the timeout value is reached,
+         // even if the batch is not full. Check if this functionality is enabled.
+         if (this.BATCH_TIMEOUT_ENABLED && !this.BATCH_TIMEOUT_TIMER) {
+             this.BATCH_TIMEOUT_TIMER = setTimeout(async () => {
+                 Logger.info(`MONGODBWRITER [${this.DB_COLLECTION.collectionName}]: Batch timeout reached. Flushing data...`);
+                 await this.flush(flushCallback);
+             }, this.BATCH_TIMEOUT_VALUE);
+         }
+         //
+     }
+ }
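
A minimal usage sketch for MongoDbWriter. The writer only requires that `collection` expose bulkWrite() and collectionName, so pairing it with the official mongodb driver, along with the connection string and document shape, are assumptions made purely for illustration:

import { MongoDbWriter } from '@tmlmobilidade/writers';
import { MongoClient } from 'mongodb';

// Placeholder connection and collection.
const client = await MongoClient.connect('mongodb://localhost:27017');
const collection = client.db('example').collection('vehicle_positions');

interface VehiclePosition { _id: string; lat: number; lon: number; }

const positionsWriter = new MongoDbWriter<VehiclePosition>({
    collection: collection,
    batch_size: 1000,
    // Flush even if the batch is not full: at most 30 s after the first buffered
    // write (batch_timeout) and 5 s after the most recent write (idle_timeout).
    batch_timeout: 30000,
    idle_timeout: 5000,
});

// Each document is matched by its filter and replaced (or inserted, via upsert).
await positionsWriter.write(
    { _id: '42|100', lat: 38.736946, lon: -9.142685 },
    { filter: { _id: '42|100' }, upsert: true, write_mode: 'replace' },
);

// Force any remaining buffered operations to be written before shutdown.
await positionsWriter.flush();
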
package/dist/postgres.d.ts ADDED
@@ -0,0 +1,15 @@
+ interface PostgresWriterOptions {
+     batch_size?: number;
+ }
+ export declare class PostgresWriter {
+     private CURRENT_BATCH_DATA;
+     private DB_CLIENT;
+     private DB_TABLE;
+     private INSTANCE_NAME;
+     private MAX_BATCH_SIZE;
+     private SESSION_TIMER;
+     constructor(instanceName: string, client: any, table: any, options?: PostgresWriterOptions);
+     flush(): Promise<void>;
+     write(data: any, options?: {}): Promise<void>;
+ }
+ export {};
package/dist/postgres.js ADDED
@@ -0,0 +1,77 @@
+ /* * */
+ import { Logger } from '@tmlmobilidade/logger';
+ import { Timer } from '@tmlmobilidade/timer';
+ /* * */
+ export class PostgresWriter {
+     //
+     CURRENT_BATCH_DATA = [];
+     DB_CLIENT = null;
+     DB_TABLE = null;
+     INSTANCE_NAME = 'Unnamed Instance';
+     MAX_BATCH_SIZE = 250;
+     SESSION_TIMER = new Timer();
+     /* * */
+     constructor(instanceName, client, table, options) {
+         if (instanceName)
+             this.INSTANCE_NAME = instanceName;
+         if (client)
+             this.DB_CLIENT = client;
+         if (table)
+             this.DB_TABLE = table;
+         if (options?.batch_size)
+             this.MAX_BATCH_SIZE = options.batch_size;
+     }
+     /* * */
+     async flush() {
+         try {
+             //
+             const flushTimer = new Timer();
+             const sessionTimerResult = this.SESSION_TIMER.get();
+             if (this.CURRENT_BATCH_DATA.length === 0)
+                 return;
+             const columns = Object.keys(this.CURRENT_BATCH_DATA[0]);
+             const values = [];
+             const placeholders = this.CURRENT_BATCH_DATA.map((item) => {
+                 const rowValues = columns.map((key) => {
+                     values.push(item[key]);
+                     return `$${values.length}`;
+                 });
+                 return `(${rowValues.join(', ')})`;
+             });
+             const insertQuery = `
+                 BEGIN;
+                 DELETE FROM ${this.DB_TABLE} WHERE code = $1;
+                 INSERT INTO ${this.DB_TABLE} (${columns.join(', ')})
+                 VALUES (${placeholders.join(', ')});
+                 COMMIT;
+             `;
+             await this.DB_CLIENT.query(insertQuery, values);
+             Logger.info(`POSTGRESWRITER [${this.INSTANCE_NAME}]: Flush | Length: ${this.CURRENT_BATCH_DATA.length} | DB Table: ${this.DB_TABLE} (session: ${sessionTimerResult}) (flush: ${flushTimer.get()})`);
+             this.CURRENT_BATCH_DATA = [];
+             //
+         }
+         catch (error) {
+             Logger.error(`POSTGRESWRITER [${this.INSTANCE_NAME}]: Error @ flush(): ${error.message}`);
+         }
+     }
+     /* * */
+     async write(data, options = {}) {
+         // Check if the batch is full
+         if (this.CURRENT_BATCH_DATA.length >= this.MAX_BATCH_SIZE) {
+             await this.flush();
+         }
+         // Reset the timer
+         if (this.CURRENT_BATCH_DATA.length === 0) {
+             this.SESSION_TIMER.reset();
+         }
+         // Add the data to the batch
+         if (Array.isArray(data)) {
+             const combinedDataWithOptions = data.map(item => ({ data: item, options: options }));
+             this.CURRENT_BATCH_DATA = [...this.CURRENT_BATCH_DATA, ...combinedDataWithOptions];
+         }
+         else {
+             this.CURRENT_BATCH_DATA.push(data);
+         }
+         //
+     }
+ }
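
A minimal usage sketch for PostgresWriter. The declaration types both client and table as any; using node-postgres (pg) as the client, plus the table name and row shape, are assumptions for illustration only. Note that the flush query above references a code column, so buffered rows are expected to include one, and the column list is taken from the keys of the first buffered row:

import { PostgresWriter } from '@tmlmobilidade/writers';
import pg from 'pg';

// Placeholder connection; any object exposing query(text, values) should work,
// since the writer only calls that method.
const client = new pg.Client({ connectionString: 'postgres://localhost:5432/example' });
await client.connect();

const ridesWriter = new PostgresWriter('rides-sync', client, 'rides', { batch_size: 100 });

// Rows are buffered in memory and written in a single multi-row statement per flush.
await ridesWriter.write({ code: 'ride-0001', line_id: '1234', passengers: 12 });
await ridesWriter.write({ code: 'ride-0002', line_id: '2710', passengers: 7 });

// Persist whatever is still buffered before closing the connection.
await ridesWriter.flush();
await client.end();
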
package/package.json ADDED
@@ -0,0 +1,51 @@
+ {
+   "name": "@tmlmobilidade/writers",
+   "version": "20251202.1817.5",
+   "author": {
+     "email": "iso@tmlmobilidade.pt",
+     "name": "TML-ISO"
+   },
+   "license": "AGPL-3.0-or-later",
+   "homepage": "https://github.com/tmlmobilidade/go#readme",
+   "bugs": {
+     "url": "https://github.com/tmlmobilidade/go/issues"
+   },
+   "repository": {
+     "type": "git",
+     "url": "git+https://github.com/tmlmobilidade/go.git"
+   },
+   "keywords": [
+     "public transit",
+     "tml",
+     "transportes metropolitanos de lisboa",
+     "go"
+   ],
+   "publishConfig": {
+     "access": "public"
+   },
+   "type": "module",
+   "files": [
+     "dist"
+   ],
+   "main": "./dist/index.js",
+   "types": "./dist/index.d.ts",
+   "scripts": {
+     "build": "tsc && resolve-tspaths",
+     "lint": "eslint ./src/ && tsc --noEmit",
+     "lint:fix": "eslint ./src/ --fix",
+     "watch": "tsc-watch --onSuccess 'resolve-tspaths'"
+   },
+   "dependencies": {
+     "@tmlmobilidade/logger": "*",
+     "@tmlmobilidade/timer": "*",
+     "papaparse": "5.5.3"
+   },
+   "devDependencies": {
+     "@tmlmobilidade/tsconfig": "*",
+     "@tmlmobilidade/types": "*",
+     "@types/node": "24.10.1",
+     "resolve-tspaths": "0.8.23",
+     "tsc-watch": "7.2.0",
+     "typescript": "5.9.3"
+   }
+ }