@engine9-io/input-tools 1.3.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,45 @@
+ const { google } = require('googleapis');
+ const fs = require('node:fs');
+
+ const fsp = fs.promises;
+
+ function Worker() {}
+
+ Worker.prototype.setAuth = async function () {
+   const keyFile = process.env.GOOGLE_APPLICATION_CREDENTIALS;
+   const settings = JSON.parse(await fsp.readFile(keyFile));
+   if (!settings.subject_to_impersonate) throw new Error(`You should include subject_to_impersonate in file ${keyFile}`);
+
+   const auth = new google.auth.GoogleAuth({
+     clientOptions: {
+       subject: settings.subject_to_impersonate,
+     },
+     keyFile,
+     scopes: ['https://www.googleapis.com/auth/drive'],
+   });
+   google.options({
+     auth,
+   });
+ };
+
+ Worker.prototype.list = async function ({ path }) {
+   await this.setAuth();
+   const drive = google.drive({ version: 'v3' });
+   const folderId = path;
+   const q = `'${folderId}' in parents and trashed=false`;
+   const raw = await drive.files.list({
+     pageSize: 150,
+     q,
+     supportsAllDrives: true, // include shared drives as well
+     includeItemsFromAllDrives: true,
+   });
+
+   return raw.data?.files;
+ };
+ Worker.prototype.list.metadata = {
+   options: {
+     path: {},
+   },
+ };
+
+ module.exports = Worker;
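
A minimal usage sketch, assuming this module is the package's Google Drive worker (the hunk above omits the filename, so the require path is hypothetical):

const DriveWorker = require('./file/Drive'); // hypothetical path; this hunk omits the filename

// GOOGLE_APPLICATION_CREDENTIALS must point at a service-account key file that
// also carries the custom subject_to_impersonate field checked by setAuth().
const worker = new DriveWorker();
worker.list({ path: '<drive-folder-id>' })
  .then((files) => files.forEach((f) => console.log(f.id, f.name)));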
@@ -0,0 +1,138 @@
+ const parquet = require('@dsnp/parquetjs');
+
+ const { Readable } = require('node:stream');
+ const debug = require('debug')('ParquetWorker');
+ const { S3Client } = require('@aws-sdk/client-s3');
+ const FileWorker = require('./FileUtilities');
+
+ function Worker() {}
+
+ async function getReader(options) {
+   const { filename } = options;
+   if (!filename) throw new Error('filename is required');
+   if (filename.indexOf('s3://') === 0) {
+     const client = new S3Client({});
+     const parts = filename.split('/');
+
+     return parquet.ParquetReader.openS3(client, {
+       Bucket: parts[2],
+       Key: parts.slice(3).join('/'),
+     });
+   }
+   return parquet.ParquetReader.openFile(filename);
+ }
+
+ Worker.prototype.meta = async function (options) {
+   const reader = await getReader(options);
+   return {
+     records: String(reader.metadata?.num_rows),
+   };
+ };
+ Worker.prototype.meta.metadata = {
+   options: {
+     path: {},
+   },
+ };
+ Worker.prototype.schema = async function (options) {
+   const reader = await getReader(options);
+   return reader.getSchema();
+ };
+ Worker.prototype.schema.metadata = {
+   options: {
+     path: {},
+   },
+ };
+
+ function cleanColumnName(name) {
+   return name.toLowerCase().replace(/[^a-z0-9_]/g, '_');
+ }
+
+ Worker.prototype.stream = async function (options) {
+   // no-op read(): records are pushed as the cursor is drained below
+   const stream = new Readable({ objectMode: true, read() {} });
+
+   const reader = await getReader(options);
+   let columns;
+   if (options.columns) {
+     const { fieldList } = await this.schema(options);
+     columns = [];
+     let requestedColumns = options.columns;
+     if (typeof options.columns === 'string') requestedColumns = options.columns.split(',').map((d) => d.trim());
+     else requestedColumns = options.columns.map((d) => (d.name ? d.name.trim() : d.trim()));
+     requestedColumns.forEach((c) => {
+       columns = columns.concat(
+         fieldList.filter((f) => (
+           f.name === c || cleanColumnName(f.name) === cleanColumnName(c)
+         )).map((f) => f.name),
+       );
+     });
+   }
+   let limit = 0;
+   if (parseInt(options.limit, 10) === options.limit) limit = parseInt(options.limit, 10);
+   // create a new cursor
+   debug(`Reading parquet file ${options.filename} with columns ${columns?.join(',')} and limit ${limit}`);
+   const cursor = reader.getCursor(columns);
+
+   // read all records from the file and push them to the stream
+   let record = null;
+   let counter = 0;
+
+   const start = new Date().getTime();
+   do {
+     // eslint-disable-next-line no-await-in-loop
+     record = await cursor.next();
+     if (!record) break; // cursor is exhausted
+     counter += 1;
+     if (limit && counter > limit) {
+       debug(`Reached limit of ${limit}, stopping`);
+       break;
+     }
+     if (counter % 5000 === 0) {
+       const end = new Date().getTime();
+       debug(`Read ${counter} ${(counter * 1000) / (end - start)}/sec`);
+     }
+     stream.push(record);
+   } while (record);
+   stream.push(null);
+   await reader.close();
+
+   return { stream };
+ };
+
+ Worker.prototype.stream.metadata = {
+   options: {
+     path: {},
+   },
+ };
+
+ Worker.prototype.toFile = async function (options) {
+   const { stream } = await this.stream(options);
+   const fworker = new FileWorker(this);
+   return fworker.objectStreamToFile({ ...options, stream });
+ };
+ Worker.prototype.toFile.metadata = {
+   options: {
+     path: {},
+   },
+ };
+
+ Worker.prototype.stats = async function (options) {
+   const reader = await getReader(options);
+   const schema = reader.getSchema();
+   const fileMetadata = reader.getFileMetaData();
+   const rowGroups = reader.getRowGroups();
+
+   return {
+     schema,
+     fileMetadata,
+     rowGroups,
+   };
+ };
+ Worker.prototype.stats.metadata = {
+   options: {
+     path: {},
+   },
+ };
+
+ module.exports = Worker;
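
A minimal sketch of reading a Parquet file through this worker; the require path and S3 object are hypothetical, since this hunk also omits its filename:

const ParquetWorker = require('./file/Parquet'); // hypothetical path
const worker = new ParquetWorker();

(async () => {
  // Column names may be passed as a comma-separated string; matching is
  // case/punctuation-insensitive via cleanColumnName.
  const { stream } = await worker.stream({
    filename: 's3://my-bucket/data/people.parquet', // hypothetical object
    columns: 'id,Email_Address',
    limit: 10,
  });
  for await (const record of stream) console.log(record);
})();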
package/file/S3.js ADDED
@@ -0,0 +1,246 @@
+ const debug = require('debug')('S3Worker');
+ const fs = require('node:fs');
+ // eslint-disable-next-line import/no-unresolved
+ const { mimeType: mime } = require('mime-type/with-db');
+ const {
+   S3Client,
+   GetObjectCommand,
+   HeadObjectCommand,
+   GetObjectAttributesCommand,
+   PutObjectCommand,
+   ListObjectsV2Command,
+ } = require('@aws-sdk/client-s3');
+ const { getTempFilename } = require('./tools');
+
+ function Worker() {}
+
+ // Split an s3://bucket/key/path URI into its Bucket and Key parts
+ function getParts(filename) {
+   if (!filename || filename.indexOf('s3://') !== 0) throw new Error(`Invalid filename for s3: ${filename}`);
+   const parts = filename.split('/');
+   const Bucket = parts[2];
+   const Key = parts.slice(3).join('/');
+   return { Bucket, Key };
+ }
+ Worker.prototype.getClient = function () {
+   if (!this.client) this.client = new S3Client({});
+   return this.client;
+ };
+
+ Worker.prototype.getMetadata = async function ({ filename }) {
+   const s3Client = this.getClient();
+   const { Bucket, Key } = getParts(filename);
+
+   const resp = await s3Client.send(new GetObjectAttributesCommand({
+     Bucket,
+     Key,
+     ObjectAttributes: ['ETag', 'Checksum', 'ObjectParts', 'StorageClass', 'ObjectSize'],
+   }));
+
+   return resp;
+ };
+ Worker.prototype.getMetadata.metadata = {
+   options: {
+     filename: {},
+   },
+ };
+
+ Worker.prototype.stream = async function ({ filename }) {
+   const s3Client = new S3Client({});
+   const { Bucket, Key } = getParts(filename);
+   const command = new GetObjectCommand({ Bucket, Key });
+   try {
+     debug(`Streaming file ${Key}`);
+     const response = await s3Client.send(command);
+     return { stream: response.Body };
+   } catch (e) {
+     debug(`Could not stream filename:${filename}`);
+     throw e;
+   }
+ };
+ Worker.prototype.stream.metadata = {
+   options: {
+     filename: {},
+   },
+ };
+
+ Worker.prototype.download = async function ({ filename }) {
+   const file = filename.split('/').pop();
+   const localPath = await getTempFilename({ targetFilename: file });
+   const s3Client = new S3Client({});
+   const { Bucket, Key } = getParts(filename);
+   const command = new GetObjectCommand({ Bucket, Key });
+   debug(`Downloading ${file} to ${localPath}`);
+   const response = await s3Client.send(command);
+   const fileStream = fs.createWriteStream(localPath);
+
+   response.Body.pipe(fileStream);
+
+   return new Promise((resolve, reject) => {
+     fileStream.on('finish', async () => {
+       const { size } = await fs.promises.stat(localPath);
+       resolve({ size, filename: localPath });
+     });
+     fileStream.on('error', reject);
+   });
+ };
+ Worker.prototype.download.metadata = {
+   options: {
+     filename: {},
+   },
+ };
+
+ Worker.prototype.put = async function (options) {
+   const { filename, directory } = options;
+   if (!filename) throw new Error('Local filename required');
+   if (directory?.indexOf('s3://') !== 0) throw new Error(`directory path must start with s3://, is ${directory}`);
+
+   const file = options.file || filename.split('/').pop();
+   const parts = directory.split('/');
+   const Bucket = parts[2];
+   const Key = parts.slice(3).filter(Boolean).concat(file).join('/');
+   const Body = fs.createReadStream(filename);
+
+   const ContentType = mime.lookup(file);
+
+   debug(`Putting ${filename} to ${JSON.stringify({ Bucket, Key, ContentType })}`);
+   const s3Client = new S3Client({});
+
+   const command = new PutObjectCommand({
+     Bucket, Key, Body, ContentType,
+   });
+
+   return s3Client.send(command);
+ };
+ Worker.prototype.put.metadata = {
+   options: {
+     filename: {},
+     directory: { description: 'Directory to put file, e.g. s3://foo-bar/dir/xyz' },
+     file: { description: 'Name of file, defaults to the filename' },
+   },
+ };
+
+ Worker.prototype.write = async function (options) {
+   const { directory, file, content } = options;
+
+   if (directory?.indexOf('s3://') !== 0) throw new Error('directory must start with s3://');
+   const parts = directory.split('/');
+
+   const Bucket = parts[2];
+   const Key = parts.slice(3).filter(Boolean).concat(file).join('/');
+   const Body = content;
+
+   debug(`Writing content of length ${content.length} to ${JSON.stringify({ Bucket, Key })}`);
+   const s3Client = new S3Client({});
+   const ContentType = mime.lookup(file);
+
+   const command = new PutObjectCommand({
+     Bucket, Key, Body, ContentType,
+   });
+
+   return s3Client.send(command);
+ };
+ Worker.prototype.write.metadata = {
+   options: {
+     directory: { description: 'Directory to put file, e.g. s3://foo-bar/dir/xyz' },
+     file: { description: 'Name of file, defaults to the filename' },
+     content: { description: 'Contents of file' },
+   },
+ };
+
+ Worker.prototype.list = async function ({ directory }) {
+   if (!directory) throw new Error('directory is required');
+   let dir = directory;
+   while (dir.slice(-1) === '/') dir = dir.slice(0, -1);
+   const { Bucket, Key: Prefix } = getParts(dir);
+   const s3Client = new S3Client({});
+   const command = new ListObjectsV2Command({
+     Bucket,
+     Prefix: `${Prefix}/`,
+     Delimiter: '/',
+   });
+
+   const { Contents: files, CommonPrefixes } = await s3Client.send(command);
+   const output = [].concat((CommonPrefixes || []).map((f) => ({
+     name: f.Prefix.slice(Prefix.length + 1, -1),
+     type: 'directory',
+   })))
+     .concat((files || []).map(({ Key }) => ({
+       name: Key.slice(Prefix.length + 1),
+       type: 'file',
+     })));
+
+   return output;
+ };
+ Worker.prototype.list.metadata = {
+   options: {
+     directory: { required: true },
+   },
+ };
+ /* List everything with the prefix */
+ Worker.prototype.listAll = async function ({ directory }) {
+   if (!directory) throw new Error('directory is required');
+   let dir = directory;
+   while (dir.slice(-1) === '/') dir = dir.slice(0, -1);
+   const { Bucket, Key: Prefix } = getParts(dir);
+   const s3Client = new S3Client({});
+   const files = [];
+   let ContinuationToken = null;
+   do {
+     const command = new ListObjectsV2Command({
+       Bucket,
+       Prefix: `${Prefix}/`,
+       ContinuationToken,
+     });
+     debug(`Sending command with ContinuationToken ${ContinuationToken}`);
+     // eslint-disable-next-line no-await-in-loop
+     const result = await s3Client.send(command);
+     const newFiles = (result.Contents?.map((d) => `s3://${Bucket}/${d.Key}`) || []);
+     debug(`Retrieved ${newFiles.length} new files, total ${files.length}, sample ${newFiles.slice(0, 3).join(',')}`);
+     files.push(...newFiles);
+     ContinuationToken = result.NextContinuationToken;
+   } while (ContinuationToken);
+   return files;
+ };
+ Worker.prototype.listAll.metadata = {
+   options: {
+     directory: { required: true },
+   },
+ };
+
+ Worker.prototype.stat = async function ({ filename }) {
+   if (!filename) throw new Error('filename is required');
+
+   const s3Client = new S3Client({});
+   const { Bucket, Key } = getParts(filename);
+   const command = new HeadObjectCommand({ Bucket, Key });
+   const response = await s3Client.send(command);
+
+   const {
+     ContentLength, // e.g. 3191
+     ContentType, // e.g. "image/jpeg"
+     LastModified, // e.g. "2016-12-15T01:19:41.000Z"
+   } = response;
+   const modifiedAt = new Date(LastModified);
+   const createdAt = modifiedAt; // S3 does not track creation time separately
+   const size = parseInt(ContentLength, 10);
+
+   return {
+     createdAt,
+     modifiedAt,
+     contentType: ContentType,
+     size,
+   };
+ };
+ Worker.prototype.stat.metadata = {
+   options: {
+     filename: {},
+   },
+ };
+
+ module.exports = Worker;
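
A minimal usage sketch of the S3 worker above; the bucket and paths are hypothetical:

const S3Worker = require('./S3'); // i.e. package/file/S3.js
const worker = new S3Worker();

(async () => {
  // Upload a local file under a bucket prefix, then inspect it.
  await worker.put({ filename: '/tmp/report.csv', directory: 's3://my-bucket/reports' });
  console.log(await worker.list({ directory: 's3://my-bucket/reports' }));
  console.log(await worker.stat({ filename: 's3://my-bucket/reports/report.csv' }));
})();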
package/file/tools.js ADDED
@@ -0,0 +1,237 @@
+ const fs = require('node:fs');
+
+ const fsp = fs.promises;
+ const path = require('node:path');
+ const debug = require('debug')('@engine9/input-tools');
+ const os = require('node:os');
+ const { mkdirp } = require('mkdirp');
+ const { Transform, PassThrough } = require('node:stream');
+
+ const JSON5 = require('json5');
+ const progress = require('debug')('info:@engine9/input-tools');
+ const unzipper = require('unzipper');
+ const {
+   S3Client,
+   HeadObjectCommand,
+   GetObjectCommand,
+ } = require('@aws-sdk/client-s3');
+
+ const {
+   v7: uuidv7,
+ } = require('uuid');
+
+ async function getTempDir({ accountId = 'engine9' }) {
+   const dir = [os.tmpdir(), accountId, new Date().toISOString().substring(0, 10)].join(path.sep);
+   try {
+     await mkdirp(dir);
+   } catch (err) {
+     if (err.code !== 'EEXIST') throw err;
+   }
+   return dir;
+ }
+
+ /*
+   Get a new, timestamp-based filename, creating any necessary directories.
+   options:
+     prefix/postfix of the file
+     source: source file, used to generate a friendly name
+ */
+ async function getTempFilename(options) {
+   let dir = await getTempDir(options);
+
+   const target = options.targetFilename;
+   if (target) {
+     if (target.indexOf('/') === 0 || target.indexOf('\\') === 0) {
+       // assume a full directory path has been specified
+       return target;
+     }
+
+     // make a distinct directory, so we don't overwrite the file
+     dir = `${dir}/${new Date().toISOString().slice(0, -6).replace(/[^0-9]/g, '_')}`;
+
+     const newDir = await mkdirp(dir);
+
+     return `${newDir}/${target}`;
+   }
+   let { prefix } = options;
+   let { postfix } = options;
+   const { targetFormat } = options;
+   if (!postfix && targetFormat === 'csv') postfix = '.csv';
+   if (options.source) {
+     postfix = `_${options.source.split('/').pop()}`;
+     postfix = postfix.replace(/['"\\]/g, '').replace(/[^a-zA-Z0-9_.-]/g, '_');
+   }
+
+   if (prefix) prefix += '_';
+
+   const p = `${dir}/${prefix || ''}${uuidv7()}${postfix || '.txt'}`;
+   return p;
+ }
+
+ // Open a packet (a zip file, local or on S3) and return an unzipper directory
+ async function getPacketFiles({ packet }) {
+   if (packet.indexOf('s3://') === 0) {
+     const parts = packet.split('/');
+     const Bucket = parts[2];
+     const Key = parts.slice(3).join('/');
+     const s3Client = new S3Client({});
+
+     debug('Getting ', { Bucket, Key });
+
+     let size = null;
+     const directory = await unzipper.Open.custom({
+       async size() {
+         const info = await s3Client.send(
+           new HeadObjectCommand({
+             Bucket,
+             Key,
+           }),
+         );
+         size = info.ContentLength;
+         progress(`Retrieving file of size ${size / (1024 * 1024)} MB`);
+         return info.ContentLength;
+       },
+
+       stream(offset, length) {
+         const ptStream = new PassThrough();
+         s3Client.send(
+           new GetObjectCommand({
+             Bucket,
+             Key,
+             Range: `bytes=${offset}-${length ?? ''}`,
+           }),
+         )
+           .then((response) => {
+             response.Body.pipe(ptStream);
+           })
+           .catch((error) => {
+             ptStream.emit('error', error);
+           });
+
+         return ptStream;
+       },
+     });
+
+     return directory;
+   }
+   const directory = await unzipper.Open.file(packet);
+   return directory;
+ }
+
+ async function getManifest({ packet }) {
+   if (!packet) throw new Error('no packet option specified');
+   const { files } = await getPacketFiles({ packet });
+   const file = files.find((d) => d.path === 'manifest.json');
+   const content = await file.buffer();
+   const manifest = JSON.parse(content.toString());
+   return manifest;
+ }
+
+ function getBatchTransform({ batchSize = 100 }) {
+   return {
+     transform: new Transform({
+       objectMode: true,
+       transform(chunk, encoding, cb) {
+         this.buffer = (this.buffer || []).concat(chunk);
+         if (this.buffer.length >= batchSize) {
+           this.push(this.buffer);
+           this.buffer = [];
+         }
+         cb();
+       },
+       flush(cb) {
+         if (this.buffer?.length > 0) this.push(this.buffer);
+         cb();
+       },
+     }),
+   };
+ }
+ function getDebatchTransform() {
+   return {
+     transform: new Transform({
+       objectMode: true,
+       transform(chunk, encoding, cb) {
+         chunk.forEach((c) => this.push(c));
+         cb();
+       },
+     }),
+   };
+ }
+
+ async function getFile({ filename, packet, type }) {
+   if (!packet && !filename) throw new Error('no packet option specified');
+   let content = null;
+   let filePath = null;
+   if (packet) {
+     const manifest = await getManifest({ packet });
+     const manifestFiles = manifest.files?.filter((d) => d.type === type);
+     if (!manifestFiles?.length) throw new Error(`No files of type ${type} found in packet`);
+     if (manifestFiles?.length > 1) throw new Error(`Multiple files of type ${type} found in packet`);
+     filePath = manifestFiles[0].path;
+     const { files } = await getPacketFiles({ packet });
+     const handle = files.find((d) => d.path === filePath);
+     const buffer = await handle.buffer();
+     content = buffer.toString();
+   } else {
+     content = await fsp.readFile(filename);
+     filePath = filename.split('/').pop();
+   }
+   if (filePath.slice(-5) === '.json' || filePath.slice(-6) === '.json5') {
+     try {
+       return JSON5.parse(content);
+     } catch (e) {
+       debug(`Error parsing JSON content from ${filePath}`, content);
+       throw e;
+     }
+   }
+   return content;
+ }
+
+ async function streamPacket({ packet, type }) {
+   if (!packet) throw new Error('no packet option specified');
+   const manifest = await getManifest({ packet });
+   const manifestFiles = manifest.files?.filter((d) => d.type === type);
+   if (!manifestFiles?.length) throw new Error(`No files of type ${type} found in packet`);
+   if (manifestFiles?.length > 1) throw new Error(`Multiple files of type ${type} found in packet`);
+   const filePath = manifestFiles[0].path;
+   const { files } = await getPacketFiles({ packet });
+   const handle = files.find((d) => d.path === filePath);
+   return { stream: handle.stream(), path: filePath };
+ }
+
+ async function downloadFile({ packet, type = 'person' }) {
+   const { stream: fileStream, path: filePath } = await streamPacket({ packet, type });
+   const filename = await getTempFilename({ targetFilename: filePath.split('/').pop() });
+
+   return new Promise((resolve, reject) => {
+     fileStream.pipe(fs.createWriteStream(filename))
+       .on('error', reject)
+       .on('finish', () => {
+         resolve({ filename });
+       });
+   });
+ }
+
+ function bool(x, _defaultVal) {
+   const defaultVal = (_defaultVal === undefined) ? false : _defaultVal;
+   if (x === undefined || x === null || x === '') return defaultVal;
+   if (typeof x !== 'string') return !!x;
+   if (x === '1') return true; // 0 will return false, but '1' is true
+   const y = x.toLowerCase();
+   return !!(y.indexOf('y') + 1) || !!(y.indexOf('t') + 1);
+ }
+
+ module.exports = {
+   bool,
+   getTempFilename,
+   downloadFile,
+   getBatchTransform,
+   getDebatchTransform,
+   getFile,
+   getManifest,
+   getPacketFiles,
+   getTempDir,
+   streamPacket,
+ };
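
A minimal sketch of the batch/debatch transforms and the bool helper from package/file/tools.js; the require path assumes a sibling module:

const { Readable } = require('node:stream');
const { pipeline } = require('node:stream/promises');
const { getBatchTransform, getDebatchTransform, bool } = require('./tools');

(async () => {
  await pipeline(
    Readable.from([{ id: 1 }, { id: 2 }, { id: 3 }]),
    getBatchTransform({ batchSize: 2 }).transform, // emits [{id:1},{id:2}], then [{id:3}] on flush
    getDebatchTransform().transform, // unwraps batches back to individual objects
    async function* (records) {
      for await (const r of records) console.log(r);
    },
  );
  console.log(bool('Yes'), bool('0'), bool(undefined, true)); // true false true
})();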