@hahnpro/flow-cli 2.11.0 → 2.12.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE CHANGED
@@ -1,6 +1,6 @@
1
1
  MIT License
2
2
 
3
- Copyright (c) 2020 Hahn PRO
3
+ Copyright (c) 2021 Hahn PRO
4
4
 
5
5
  Permission is hereby granted, free of charge, to any person obtaining a copy
6
6
  of this software and associated documentation files (the "Software"), to deal
package/README.md CHANGED
@@ -1,6 +1,6 @@
1
1
  # `@hahnpro/flow-cli`
2
2
 
3
- https://gitlab.com/hahnpro/flow
3
+ https://github.com/hahnprojects/flow
4
4
 
5
5
  # Commands
6
6
 
@@ -1,35 +1,46 @@
1
1
  #!/usr/bin/env node
2
- require('reflect-metadata');
3
-
4
- const archiver = require('archiver');
5
- let axios = require('axios').default;
6
- const chalk = require('chalk');
7
- const { Command } = require('commander');
8
- const copyfiles = require('copyfiles');
9
- const execa = require('execa');
10
- const FormData = require('form-data');
11
- const fs = require('fs');
12
- const glob = require('glob');
13
- const HttpsProxyAgent = require('https-proxy-agent');
14
- const ora = require('ora');
15
- const path = require('path');
16
-
17
- require('dotenv').config();
18
-
19
- const log = console.log;
20
- const ok = chalk.bold.green;
21
- const error = chalk.bold.red;
2
+ import 'reflect-metadata';
3
+ import 'dotenv/config';
4
+
5
+ import archiver from 'archiver';
6
+ import Axios from 'axios';
7
+ import chalk from 'chalk';
8
+ import { Command } from 'commander';
9
+ import copyfiles from 'copyfiles';
10
+ import { execa } from 'execa';
11
+ import FormData from 'form-data';
12
+ import glob from 'glob';
13
+ import HttpsProxyAgent from 'https-proxy-agent';
14
+ import fs from 'node:fs';
15
+ import { createRequire } from 'node:module';
16
+ import path from 'node:path';
17
+ import ora from 'ora';
18
+
19
+ import { handleConvertedOutput, prepareTsFile } from './utils.mjs';
20
+
21
+ const require = createRequire(import.meta.url);
22
+
23
+ const logger = {
24
+ /* eslint-disable no-console */
25
+ log: console.log,
26
+ error: (message) => console.log(chalk.bold.red(message)),
27
+ ok: (message) => console.log(chalk.bold.green(message)),
28
+ /* eslint-enable no-console */
29
+ };
22
30
 
23
31
  const apiUser = process.env.API_USER;
24
32
  const apiKey = process.env.API_KEY;
25
33
  const baseUrl = process.env.PLATFORM_URL;
26
- const buildDir = process.env.BUILD_DIR || 'dist';
34
+ const buildDirectory = process.env.BUILD_DIR || 'dist';
27
35
  const realm = process.env.REALM;
28
36
  const authUrl = process.env.AUTH_URL || `${baseUrl}/auth/realms/${realm}/protocol/openid-connect/token`;
29
37
 
38
+ let axios;
30
39
  if (process.env.https_proxy || process.env.http_proxy) {
31
40
  const httpsAgent = new HttpsProxyAgent(process.env.https_proxy || process.env.http_proxy);
32
- axios = axios.create({ httpsAgent, proxy: false });
41
+ axios = Axios.create({ httpsAgent, proxy: false });
42
+ } else {
43
+ axios = Axios;
33
44
  }
34
45
 
35
46
  let apiToken;
@@ -41,7 +52,6 @@ const CMD = {
41
52
  FORMAT: 'format',
42
53
  INSTALL: 'install',
43
54
  LINT: 'lint',
44
- PUBLISH: 'publish',
45
55
  RUN: 'run',
46
56
  TEST: 'test',
47
57
  WATCH: 'watch',
@@ -50,7 +60,7 @@ const CMD = {
50
60
  const program = new Command();
51
61
 
52
62
  program
53
- .version('2.10.0', '-v, --version')
63
+ .version('2.12.0', '-v, --version')
54
64
  .usage('[command] [options]')
55
65
  .description('Flow Module Management Tool')
56
66
  .on('--help', () => {});
@@ -65,8 +75,8 @@ program
65
75
  await exec(CMD.INSTALL, project);
66
76
  await exec(CMD.BUILD, project);
67
77
  await copyProjectFiles(project);
68
- } catch (err) {
69
- if (err) log(err);
78
+ } catch (error) {
79
+ if (error) logger.log(error);
70
80
  process.exit(1);
71
81
  }
72
82
  });
@@ -85,8 +95,8 @@ program
85
95
  const project = await findProject(projectName);
86
96
  await exec(CMD.INSTALL, project);
87
97
  }
88
- } catch (err) {
89
- if (err) log(err);
98
+ } catch (error) {
99
+ if (error) logger.log(error);
90
100
  process.exit(1);
91
101
  }
92
102
  });
@@ -105,8 +115,8 @@ program
105
115
  const project = await findProject(projectName);
106
116
  await exec(CMD.AUDIT, project);
107
117
  }
108
- } catch (err) {
109
- if (err) log(err);
118
+ } catch (error) {
119
+ if (error) logger.log(error);
110
120
  process.exit(1);
111
121
  }
112
122
  });
@@ -122,8 +132,8 @@ program
122
132
  }
123
133
  project = await findProject(projectName);
124
134
  await exec(CMD.LINT, project);
125
- } catch (err) {
126
- if (err) log(err);
135
+ } catch (error) {
136
+ if (error) logger.log(error);
127
137
  process.exit(1);
128
138
  }
129
139
  });
@@ -134,8 +144,8 @@ program
134
144
  .action(async () => {
135
145
  try {
136
146
  await exec(CMD.FORMAT, { name: 'all' });
137
- } catch (err) {
138
- if (err) log(err);
147
+ } catch (error) {
148
+ if (error) logger.log(error);
139
149
  process.exit(1);
140
150
  }
141
151
  });
@@ -147,14 +157,14 @@ program
147
157
  try {
148
158
  if (checkIfAll(projectName)) process.exit(1);
149
159
  const project = await findProject(projectName);
150
- await clean(buildDir);
160
+ await clean(buildDirectory);
151
161
  await exec(CMD.INSTALL, project);
152
162
  await exec(CMD.BUILD, project);
153
163
  await copyProjectFiles(project);
154
164
  await validateModule(project);
155
165
  await packageModule(project);
156
- } catch (err) {
157
- if (err) log(err);
166
+ } catch (error) {
167
+ if (error) logger.log(error);
158
168
  process.exit(1);
159
169
  }
160
170
  });
@@ -167,7 +177,7 @@ program
167
177
  .description('Publishes specified Module to Cloud Platform')
168
178
  .action(async (projectName, options) => {
169
179
  try {
170
- if (checkEnvModules()) process.exit(1);
180
+ if (checkEnvironmentModules()) process.exit(1);
171
181
  const projects = [];
172
182
  if (projectName === 'all') {
173
183
  for (const project of await findProjects()) {
@@ -179,25 +189,25 @@ program
179
189
 
180
190
  await getAccessToken();
181
191
  for (const project of projects) {
182
- await clean(buildDir);
192
+ await clean(buildDirectory);
183
193
  await exec(CMD.INSTALL, project);
184
194
  await exec(CMD.BUILD, project);
185
195
  await copyProjectFiles(project);
186
196
  await validateModule(project);
187
197
  try {
188
198
  await publishModule(project);
189
- } catch (e) {
199
+ } catch (error) {
190
200
  if (
191
201
  options.skip &&
192
- e &&
193
- e.response &&
194
- e.response.data &&
195
- e.response.data.message === 'New module version must greater than latest version'
202
+ error &&
203
+ error.response &&
204
+ error.response.data &&
205
+ error.response.data.message === 'New module version must greater than latest version'
196
206
  ) {
197
- log(ok(`Module "${project.name}" is up to date. Skipping.`));
207
+ logger.ok(`Module "${project.name}" is up to date. Skipping.`);
198
208
  } else {
199
- log(error(`Publishing Module "${project.name}" failed.`));
200
- handleApiError(e);
209
+ logger.error(`Publishing Module "${project.name}" failed.`);
210
+ handleApiError(error);
201
211
  process.exit(1);
202
212
  }
203
213
  }
@@ -205,8 +215,8 @@ program
205
215
  await publishFunctions(project, options.update);
206
216
  }
207
217
  }
208
- } catch (err) {
209
- if (err) log(err);
218
+ } catch (error) {
219
+ if (error) logger.log(error);
210
220
  process.exit(1);
211
221
  }
212
222
  });
@@ -217,7 +227,7 @@ program
217
227
  .description('Publishes all Flow Functions inside specified Module to Cloud Platform')
218
228
  .action(async (projectName, options) => {
219
229
  try {
220
- if (checkEnvModules()) process.exit(1);
230
+ if (checkEnvironmentModules()) process.exit(1);
221
231
  const projects = [];
222
232
  if (projectName === 'all') {
223
233
  for (const project of await findProjects()) {
@@ -231,8 +241,8 @@ program
231
241
  for (const project of projects) {
232
242
  await publishFunctions(project, options.update);
233
243
  }
234
- } catch (err) {
235
- if (err) log(err);
244
+ } catch (error) {
245
+ if (error) logger.log(error);
236
246
  process.exit(1);
237
247
  }
238
248
  });
@@ -248,8 +258,8 @@ program
248
258
  await exec(CMD.BUILD, project);
249
259
  await copyProjectFiles(project);
250
260
  await exec(CMD.WATCH, project);
251
- } catch (err) {
252
- if (err) log(err);
261
+ } catch (error) {
262
+ if (error) logger.log(error);
253
263
  process.exit(1);
254
264
  }
255
265
  });
@@ -262,8 +272,8 @@ program
262
272
  if (checkIfAll(projectName)) process.exit(1);
263
273
  const project = await findProject(projectName);
264
274
  await exec(CMD.RUN, project);
265
- } catch (err) {
266
- if (err) log(err);
275
+ } catch (error) {
276
+ if (error) logger.log(error);
267
277
  process.exit(1);
268
278
  }
269
279
  });
@@ -284,8 +294,8 @@ program
284
294
  const project = await findProject(projectName);
285
295
  await exec(CMD.TEST, project);
286
296
  }
287
- } catch (err) {
288
- if (err) log(err);
297
+ } catch (error) {
298
+ if (error) logger.log(error);
289
299
  process.exit(1);
290
300
  }
291
301
  });
@@ -302,27 +312,27 @@ program
302
312
  cwd: project.location,
303
313
  ignore: ['node_modules/**/*', '**/package*.json', '**/tsconfig*.json'],
304
314
  };
305
- glob('**/*.*', globOptions, async (err, files) => {
315
+ glob('**/*.*', globOptions, async (error, files) => {
306
316
  const filtered = files.filter((file) => !file.endsWith('.spec.ts'));
307
- const tsJsonMap = filtered.reduce((acc, cur, i, arr) => {
308
- if (cur.endsWith('.ts')) {
317
+ const tsJsonMap = filtered.reduce((accumulator, current, index, array) => {
318
+ if (current.endsWith('.ts')) {
309
319
  // get json file for current function
310
- const json = arr.find((v) => v === `${cur.split('.')[0]}.json`);
320
+ const json = array.find((v) => v === `${current.split('.')[0]}.json`);
311
321
  if (json) {
312
- acc.push({
313
- ts: path.join(globOptions.cwd, cur),
322
+ accumulator.push({
323
+ ts: path.join(globOptions.cwd, current),
314
324
  json: path.join(globOptions.cwd, json),
315
325
  });
316
326
  }
317
327
  }
318
- return acc;
328
+ return accumulator;
319
329
  }, []);
320
- tsJsonMap.forEach((entry) => {
321
- generateSchemasForFile(entry.ts, entry.json);
322
- });
330
+ for (let entry of tsJsonMap) {
331
+ await generateSchemasForFile(entry.ts, entry.json);
332
+ }
323
333
  });
324
- } catch (err) {
325
- if (err) log(err);
334
+ } catch (error) {
335
+ if (error) logger.log(error);
326
336
  process.exit(1);
327
337
  }
328
338
  });
@@ -331,216 +341,31 @@ if (process.env.NODE_ENV !== 'test') {
331
341
  program.parse(process.argv);
332
342
  }
333
343
 
334
- function generateSchemasForFile(tsPath, jsonPath) {
344
+ async function generateSchemasForFile(tsPath, jsonPath) {
335
345
  // get schema
336
- let json = require(path.join(process.cwd(), jsonPath));
346
+ let json = JSON.parse(await fs.promises.readFile(path.join(process.cwd(), jsonPath)));
337
347
 
338
348
  const filePath = path.join(process.cwd(), tsPath);
339
- const tsFile = String(fs.readFileSync(filePath));
340
- const dir = path.dirname(filePath);
341
-
342
- execa('ts-node', ['-T', '--dir', dir], { input: prepareTsFile(tsFile), preferLocal: true }).then((result) => {
343
- json = handleConvertedOutput(result.stdout, jsonPath, json);
349
+ const tsFile = String(await fs.promises.readFile(filePath));
350
+ const directory = path.dirname(filePath);
344
351
 
345
- fs.writeFileSync(path.join(process.cwd(), jsonPath), JSON.stringify(json, null, 2) + '\n');
346
- });
347
- }
348
-
349
- function handleConvertedOutput(result, jsonPath, json) {
350
- let schema;
351
- try {
352
- schema = JSON.parse(result);
353
- } catch (e) {
354
- log(error(result));
355
- return json;
356
- }
357
- [
358
- ['propertiesSchema', 'Properties'],
359
- ['inputStreams', 'InputProperties'],
360
- ['outputStreams', 'OutputProperties'],
361
- ].forEach((value) => {
362
- const propsSchema = schema[value[1]] || {};
363
- (propsSchema.required || []).forEach((reqProp) => {
364
- propsSchema.properties[reqProp] = { ...propsSchema.properties[reqProp], required: true };
365
- });
366
- // remove required field
367
- delete propsSchema.required;
368
-
369
- checkTypes(getTypes(jsonPath), propsSchema, jsonPath);
370
-
371
- const completeSchema = {
372
- schema: {
373
- type: 'object',
374
- properties: {
375
- ...propsSchema.properties,
376
- },
377
- },
378
- };
379
-
380
- if (value[0] === 'propertiesSchema') {
381
- if (!json['propertiesSchema']) {
382
- json['propertiesSchema'] = completeSchema;
383
- }
384
- } else {
385
- // check if config for default input/output stream exists
386
- if (!json[value[0]].find((v) => v.name === 'default')) {
387
- if (propsSchema) {
388
- json[value[0]].push({
389
- name: 'default',
390
- ...completeSchema,
391
- });
392
- }
393
- }
394
- }
395
- });
396
-
397
- // add definitions
398
- if (Object.keys(schema).some((key) => !['Properties', 'InputProperties', 'OutputProperties'].includes(key))) {
399
- const typeDefinitions = Object.keys(schema).filter((key) => !['Properties', 'InputProperties', 'OutputProperties'].includes(key));
400
- json.definitions = typeDefinitions.reduce((previousValue, currentValue) => {
401
- const additionalSchema = schema[currentValue];
402
- (additionalSchema.required || []).forEach((reqProp) => {
403
- additionalSchema.properties[reqProp] = { ...additionalSchema.properties[reqProp], required: true };
404
- });
405
- delete additionalSchema.required;
406
- previousValue[currentValue] = additionalSchema;
407
- return previousValue;
408
- }, {});
409
- }
410
- return json;
411
- }
412
-
413
- function checkTypes(definedTypes, propsSchema, jsonPath) {
414
- const knownTypes = [
415
- ...definedTypes,
416
- 'string',
417
- 'undefined',
418
- 'number',
419
- 'boolean',
420
- 'any',
421
- 'object',
422
- 'array',
423
- 'integer',
424
- 'Asset',
425
- 'AssetType',
426
- 'Flow',
427
- 'Secret',
428
- 'TimeSeries',
429
- ];
430
-
431
- // check if all types are known
432
- const props = propsSchema.properties || {};
433
- for (const prop of Object.keys(props)) {
434
- if (props[prop].type && !knownTypes.includes(props[prop].type)) {
435
- console.log(
436
- error(`ERROR: unknown type ${props[prop].type}.
437
- Please add a schema for this type in ${jsonPath}
438
- for more info check the documentation`),
439
- );
440
- return false;
441
- }
442
- }
443
- return true;
444
- }
445
-
446
- function prepareTsFile(file) {
447
- // if a class extends another and does not have its own fields no metadata is generated and so no schema can be generated
448
- // in this case replace empty block with the block it inherits from
449
- let codeBlocks = getCodeBlocks(file);
450
- const emptyExtendsBlock = codeBlocks.find((block) => classNameIncludes(block, 'extends') && isBlockEmpty(block));
451
- if (emptyExtendsBlock) {
452
- // replace block and remove extends
453
- let replBlock = `${emptyExtendsBlock}`;
454
- if (replBlock.replace(/\s\s+/g, ' ').trim().startsWith('class OutputProperties')) {
455
- // remove extends
456
- replBlock = replBlock.replace('extends InputProperties', '');
457
- // replace block with InputProperties block
458
- const inputPropsBlock = codeBlocks.find((v) => classNameIncludes(v, 'InputProperties') && !classNameIncludes(v, 'OutputProperties'));
459
- replBlock = replBlock.replace(getBlockContent(replBlock), getBlockContent(inputPropsBlock));
460
-
461
- file = file.replace(emptyExtendsBlock, replBlock);
462
- }
463
- }
464
- return (
465
- `import { validationMetadatasToSchemas as v } from 'class-validator-jsonschema';\n` +
466
- `import { defaultMetadataStorage as classTransformerDefaultMetadataStorage } from 'class-transformer/cjs/storage';\n` +
467
- `${file}\n` +
468
- `const s = v({\n
469
- additionalConverters: {\n
470
- UnitArgsValidator: (meta) => {\n
471
- return {\n
472
- measure: meta.constraints[0],\n
473
- unit: meta.constraints[1],\n
474
- type: 'number',\n
475
- };\n
476
- },\n
477
- },\n
478
- classTransformerMetadataStorage\n
479
- });\n` +
480
- `console.log(JSON.stringify(s));`
481
- );
482
- }
483
-
484
- function getCodeBlocks(str) {
485
- const blocks = [];
486
- let counter = 0;
487
- let start = 0;
488
- let lastNewline = 0;
489
- [...str].forEach((char, index) => {
490
- if (char === '\n') {
491
- lastNewline = index;
492
- }
493
- if (char === '{') {
494
- if (counter === 0) {
495
- // first bracket of block
496
- start = lastNewline;
497
- }
498
- counter++;
499
- } else if (char === '}') {
500
- counter--;
501
- if (counter === 0) {
502
- // last bracket of block
503
- blocks.push(str.substring(start, index + 1));
504
- }
505
- }
506
- });
507
- return blocks;
508
- }
509
-
510
- function classNameIncludes(str, className) {
511
- return str.trim().split('\n', 1)[0].includes(className);
512
- }
513
-
514
- function getBlockContent(block) {
515
- return block.substring(block.indexOf('{'), block.lastIndexOf('}') + 1);
516
- }
517
-
518
- function isBlockEmpty(block) {
519
- const blockContent = block.substring(block.indexOf('{') + 1, block.lastIndexOf('}'));
520
- return !blockContent.trim();
521
- }
522
-
523
- function getTypes(filePath) {
524
- try {
525
- const json = require(path.join(process.cwd(), filePath));
526
- return json.definitions ? Object.keys(json.definitions) : [];
527
- } catch (e) {
528
- return [];
529
- }
352
+ const result = await execa('ts-node', ['-T', '--dir', directory], { input: prepareTsFile(tsFile), preferLocal: true });
353
+ json = await handleConvertedOutput(result.stdout, jsonPath, json);
354
+ await fs.promises.writeFile(path.join(process.cwd(), jsonPath), JSON.stringify(json, null, 2) + '\n');
530
355
  }
531
356
 
532
357
  async function clean(buildFolder) {
533
358
  return new Promise((resolve, reject) => {
534
359
  const spinner = getSpinner('Cleaning').start();
535
- fs.rmdir(buildFolder, { recursive: true }, (err) => {
536
- if (err) {
360
+ fs.rm(buildFolder, { recursive: true, force: true }, (error) => {
361
+ if (error) {
537
362
  spinner.stop();
538
- log(error('Cleaning failed'));
539
- log(error(err));
540
- return reject(err);
363
+ logger.error('Cleaning failed');
364
+ logger.error(error);
365
+ return reject(error);
541
366
  } else {
542
367
  spinner.stop();
543
- log(ok('Cleaning successful'));
368
+ logger.ok('Cleaning successful');
544
369
  return resolve();
545
370
  }
546
371
  });
@@ -550,13 +375,12 @@ async function clean(buildFolder) {
550
375
  function exec(cmd, project) {
551
376
  return new Promise((resolve, reject) => {
552
377
  if (!project) {
553
- log(`${chalk.red('Wrong command options.')} Type "hpc ${cmd} --help" to see how to use this command`);
554
378
  return reject();
555
379
  }
556
380
 
557
381
  const options = { ...getProcessOptions(cmd, project), env: process.env };
558
382
  if (cmd === CMD.RUN || cmd === CMD.WATCH) {
559
- log(ok(`\n${getLabel(cmd)} ${project.name}:\n`));
383
+ logger.ok(`\n${getLabel(cmd)} ${project.name}:\n`);
560
384
  execa(getProcess(cmd), getProcessArguments(cmd, project), options).stdout.pipe(process.stdout);
561
385
  } else {
562
386
  const spinner = getSpinner(`${getLabel(cmd)} ${project.name}`);
@@ -564,76 +388,66 @@ function exec(cmd, project) {
564
388
  execa(getProcess(cmd), getProcessArguments(cmd, project), options)
565
389
  .then((result) => {
566
390
  spinner.stop();
567
- log(result.stdout);
568
- log(ok(`${getLabel(cmd)} Succeeded`));
391
+ logger.log(result.stdout);
392
+ logger.ok(`${getLabel(cmd)} Succeeded`);
569
393
  return resolve();
570
394
  })
571
- .catch((err) => {
395
+ .catch((error) => {
572
396
  spinner.stop();
573
- if (err.stderr) log(error(err.stderr));
574
- else log(error(err));
575
- if (err.stdout) log(err.stdout);
576
- log(error(`${getLabel(cmd)} Failed`));
397
+ if (error.stderr) logger.error(error.stderr);
398
+ else logger.error(error);
399
+ if (error.stdout) logger.log(error.stdout);
400
+ logger.error(`${getLabel(cmd)} Failed`);
577
401
  return reject();
578
402
  });
579
403
  }
580
404
  });
581
405
  }
582
406
 
583
- function isDir(p) {
584
- return new Promise((res, rej) => {
585
- fs.lstat(p, (err, stats) => {
586
- if (!err && stats) {
587
- res(stats.isDirectory());
407
+ function isDirectory(p) {
408
+ return new Promise((resolve) => {
409
+ fs.lstat(p, (error, stats) => {
410
+ if (!error && stats) {
411
+ resolve(stats.isDirectory());
588
412
  } else {
589
- res(false);
413
+ resolve(false);
590
414
  }
591
415
  });
592
416
  });
593
417
  }
594
418
 
595
419
  async function findProjects() {
596
- const readDir = (dir) =>
597
- new Promise((res, rej) => {
598
- fs.readdir(dir, (err, files) => {
599
- if (!err && files) {
600
- res(files);
601
- } else {
602
- res([]);
603
- }
604
- });
605
- });
606
- const isProject = (dir) =>
607
- new Promise((res, rej) => {
608
- fs.access(path.join(dir, 'package.json'), (err) => {
609
- if (!err) {
610
- res(true);
420
+ const isProject = (directory) =>
421
+ new Promise((resolve) => {
422
+ fs.access(path.join(directory, 'package.json'), (error) => {
423
+ if (!error) {
424
+ resolve(true);
611
425
  } else {
612
- res(false);
426
+ resolve(false);
613
427
  }
614
428
  });
615
429
  });
616
430
 
617
- const rootPkg = await readJson(path.join(process.cwd(), 'package.json'));
431
+ const rootPackage = await readJson(path.join(process.cwd(), 'package.json'));
618
432
 
619
433
  const projects = [];
620
- const files = await readDir(projectsRoot);
434
+ const files = await fs.promises.readdir(projectsRoot);
621
435
  if (files) {
622
436
  for (const file of files) {
623
- if (file && (await isDir(path.join(projectsRoot, file)))) {
437
+ if (file && (await isDirectory(path.join(projectsRoot, file)))) {
624
438
  const projectPath = path.join(projectsRoot, file, 'package.json');
625
439
  if (await isProject(path.join(projectsRoot, file))) {
626
440
  try {
627
- const pkg = await readJson(path.join(path.dirname(projectPath), 'package.json'));
628
- pkg.location = path.posix.join(projectsRoot, file);
629
- pkg.dist = path.posix.join(process.cwd(), buildDir, file);
630
- if (rootPkg) {
631
- pkg.dependencies = { ...pkg.dependencies, ...rootPkg.dependencies };
632
- pkg.repository = rootPkg.repository;
441
+ const package_ = await readJson(path.join(path.dirname(projectPath), 'package.json'));
442
+ package_.location = path.posix.join(projectsRoot, file);
443
+ package_.dist = path.posix.join(process.cwd(), buildDirectory, file);
444
+ if (rootPackage) {
445
+ package_.dependencies = { ...package_.dependencies, ...rootPackage.dependencies };
446
+ package_.repository = rootPackage.repository;
633
447
  }
634
- projects.push(pkg);
635
- } catch (err) {
636
- if (err) log(err);
448
+ projects.push(package_);
449
+ } catch (error) {
450
+ if (error) logger.log(error);
637
451
  }
638
452
  }
639
453
  }
@@ -646,7 +460,7 @@ async function findProjects() {
646
460
  function findProject(projectName) {
647
461
  return new Promise(async (resolve, reject) => {
648
462
  if (!projectName) {
649
- log(error('No project specified'));
463
+ logger.error('No project specified');
650
464
  return reject();
651
465
  }
652
466
  if (projectName === 'all') {
@@ -661,13 +475,13 @@ function findProject(projectName) {
661
475
  const projects = await findProjects();
662
476
  for (const project of projects) {
663
477
  const location = path.parse(project.location);
664
- const dirName = location.name + location.ext;
665
- if (project.name === projectName || dirName === projectName) {
478
+ const directoryName = location.name + location.ext;
479
+ if (project.name === projectName || directoryName === projectName) {
666
480
  return resolve(project);
667
481
  }
668
482
  }
669
483
 
670
- log(error(`Cloud not find ${projectName} Module.`));
484
+ logger.error(`Cloud not find ${projectName} Module.`);
671
485
  reject();
672
486
  });
673
487
  }
@@ -675,32 +489,32 @@ function findProject(projectName) {
675
489
  async function getAccessToken() {
676
490
  return new Promise(async (resolve, reject) => {
677
491
  try {
678
- const params = new URLSearchParams([
492
+ const parameters = new URLSearchParams([
679
493
  ['client_id', apiUser],
680
494
  ['client_secret', apiKey],
681
495
  ['grant_type', 'client_credentials'],
682
496
  ]);
683
497
  const headers = { 'Content-Type': 'application/x-www-form-urlencoded' };
684
- const response = (await axios.post(authUrl, params.toString(), { headers })).data;
498
+ const response = await axios.post(authUrl, parameters.toString(), { headers });
499
+ const data = response.data;
685
500
 
686
- if (!response || !response.access_token) {
687
- throw new Error();
501
+ if (!data || !data.access_token) {
502
+ throw new Error('Could not get AccessToken');
688
503
  }
689
- apiToken = response.access_token;
690
- log(ok('AccessToken acquired'));
504
+ apiToken = data.access_token;
505
+ logger.ok('AccessToken acquired');
691
506
  return resolve();
692
- } catch (err) {
693
- log(error('Could not get AccessToken'));
694
- handleApiError(err);
507
+ } catch (error) {
508
+ handleApiError(error);
695
509
  return reject();
696
510
  }
697
511
  });
698
512
  }
699
513
 
700
514
  async function packageModule(project) {
701
- const { location, dist, ...package } = project;
515
+ const { dist, ...package_ } = project;
702
516
  const file = path.posix.join(dist, '..', `${project.name}.zip`);
703
- await writeJson(path.join(dist, 'package.json'), package);
517
+ await writeJson(path.join(dist, 'package.json'), package_);
704
518
  await zipDirectory(dist, file);
705
519
  return file;
706
520
  }
@@ -723,10 +537,10 @@ async function publishModule(project) {
723
537
  },
724
538
  });
725
539
 
726
- log(ok(`Module "${project.name}" published!`));
540
+ logger.ok(`Module "${project.name}" published!`);
727
541
  return resolve();
728
- } catch (err) {
729
- return reject(err);
542
+ } catch (error) {
543
+ return reject(error);
730
544
  } finally {
731
545
  deleteFile(file);
732
546
  }
@@ -738,18 +552,18 @@ async function validateModule(project) {
738
552
  const moduleName = Reflect.getMetadata('module:name', module.default);
739
553
  const moduleDeclarations = Reflect.getMetadata('module:declarations', module.default);
740
554
 
741
- const funcFqns = [];
555
+ const functionFqns = [];
742
556
  for (const declaration of moduleDeclarations) {
743
557
  const fqn = Reflect.getMetadata('element:functionFqn', declaration);
744
558
  if (!fqn) {
745
559
  throw new Error(`FlowFunction (${declaration.name}) metadata is missing or invalid.`);
746
560
  }
747
- funcFqns.push(fqn);
561
+ functionFqns.push(fqn);
748
562
  }
749
563
 
750
564
  if (moduleName) {
751
565
  project.name = moduleName;
752
- project.functions = funcFqns;
566
+ project.functions = functionFqns;
753
567
  } else {
754
568
  throw new Error('Could not validate module name');
755
569
  }
@@ -761,9 +575,9 @@ async function publishFunctions(project, update) {
761
575
  cwd: project.location,
762
576
  ignore: ['node_modules/**/*', '**/package*.json', '**/tsconfig*.json'],
763
577
  };
764
- glob('**/*.json', globOptions, async (err, files) => {
765
- if (err) {
766
- return reject(err);
578
+ glob('**/*.json', globOptions, async (error, files) => {
579
+ if (error) {
580
+ return reject(error);
767
581
  }
768
582
  const headers = { Authorization: `Bearer ${apiToken}` };
769
583
 
@@ -775,23 +589,23 @@ async function publishFunctions(project, update) {
775
589
  if (update) {
776
590
  try {
777
591
  await axios.put(`${baseUrl}/api/flow/functions/${json.fqn}`, json, { headers });
778
- log(ok(`Flow Function "${json.fqn}" has been updated`));
779
- } catch (err) {
780
- log(error(`Flow Function "${json.fqn}" could not be updated`));
781
- handleApiError(err);
592
+ logger.ok(`Flow Function "${json.fqn}" has been updated`);
593
+ } catch (error) {
594
+ logger.error(`Flow Function "${json.fqn}" could not be updated`);
595
+ handleApiError(error);
782
596
  }
783
597
  } else {
784
598
  try {
785
599
  await axios.post(`${baseUrl}/api/flow/functions`, json, { headers });
786
- log(ok(`Flow Function "${json.fqn}" has been created`));
787
- } catch (err) {
788
- log(error(`Flow Function "${json.fqn}" could not be created`));
789
- handleApiError(err);
600
+ logger.ok(`Flow Function "${json.fqn}" has been created`);
601
+ } catch (error) {
602
+ logger.error(`Flow Function "${json.fqn}" could not be created`);
603
+ handleApiError(error);
790
604
  }
791
605
  }
792
606
  }
793
- } catch (err) {
794
- log(error(err));
607
+ } catch (error) {
608
+ logger.error(error);
795
609
  }
796
610
  }
797
611
  return resolve();
@@ -799,14 +613,14 @@ async function publishFunctions(project, update) {
799
613
  });
800
614
  }
801
615
 
802
- function handleApiError(err) {
803
- if (err.isAxiosError && err.response) {
804
- log(error(`${err.response.status} ${err.response.statusText}`));
805
- if (err.response.data) {
806
- log(error(JSON.stringify(err.response.data)));
616
+ function handleApiError(error) {
617
+ if (error.isAxiosError && error.response) {
618
+ logger.error(`${error.response.status} ${error.response.statusText}`);
619
+ if (error.response.data) {
620
+ logger.error(JSON.stringify(error.response.data));
807
621
  }
808
622
  } else {
809
- log(error(err));
623
+ logger.error(error);
810
624
  }
811
625
  }
812
626
 
@@ -817,7 +631,7 @@ function zipDirectory(source, out) {
817
631
  return new Promise((resolve, reject) => {
818
632
  archive
819
633
  .directory(source, false)
820
- .on('error', (err) => reject(err))
634
+ .on('error', (error) => reject(error))
821
635
  .pipe(stream);
822
636
 
823
637
  stream.on('close', () => resolve());
@@ -827,8 +641,8 @@ function zipDirectory(source, out) {
827
641
 
828
642
  function deleteFile(path) {
829
643
  return new Promise((resolve, reject) => {
830
- fs.unlink(path, (err) => {
831
- if (err) return reject(err);
644
+ fs.unlink(path, (error) => {
645
+ if (error) return reject(error);
832
646
  return resolve();
833
647
  });
834
648
  });
@@ -859,14 +673,14 @@ function getProcess(cmd) {
859
673
  function copyProjectFiles(project) {
860
674
  return new Promise((resolve, reject) => {
861
675
  copyfiles(
862
- [`${project.location}/**`, `${buildDir}/`],
676
+ [`${project.location}/**`, `${buildDirectory}/`],
863
677
  {
864
678
  exclude: [`${project.location}/*.json`, `${project.location}/**/*.ts`, `${project.location}/**/test/**`],
865
679
  up: 1,
866
680
  },
867
- (err) => {
868
- if (err) {
869
- return reject(err);
681
+ (error) => {
682
+ if (error) {
683
+ return reject(error);
870
684
  }
871
685
  return resolve();
872
686
  },
@@ -928,32 +742,32 @@ function getSpinner(message) {
928
742
 
929
743
  function checkIfAll(projectName) {
930
744
  if (projectName === 'all') {
931
- log(error(`Please specify a project. Command can't be run for all.`));
745
+ logger.error(`Please specify a project. Command can't be run for all.`);
932
746
  return true;
933
747
  }
934
748
  return false;
935
749
  }
936
750
 
937
- function checkEnvModules() {
751
+ function checkEnvironmentModules() {
938
752
  let missing = false;
939
753
  if (!apiUser) {
940
- log(error('"API_USER" env var is not set'));
754
+ logger.error('"API_USER" env var is not set');
941
755
  missing = true;
942
756
  }
943
757
  if (!apiKey) {
944
- log(error('"API_KEY" env var is not set'));
758
+ logger.error('"API_KEY" env var is not set');
945
759
  missing = true;
946
760
  }
947
761
  if (!baseUrl) {
948
- log(error('"PLATFORM_URL" env var is not set'));
762
+ logger.error('"PLATFORM_URL" env var is not set');
949
763
  missing = true;
950
764
  }
951
765
  if (!realm) {
952
- log(error('"REALM" env var is not set'));
766
+ logger.error('"REALM" env var is not set');
953
767
  missing = true;
954
768
  }
955
- if (!buildDir) {
956
- log(error('"BUILD_DIR" env var is not set'));
769
+ if (!buildDirectory) {
770
+ logger.error('"BUILD_DIR" env var is not set');
957
771
  missing = true;
958
772
  }
959
773
  return missing;
@@ -961,12 +775,12 @@ function checkEnvModules() {
961
775
 
962
776
  function readJson(path) {
963
777
  return new Promise((resolve, reject) => {
964
- fs.readFile(path, { encoding: 'utf8' }, (err, data) => {
965
- if (err) return reject(err);
778
+ fs.readFile(path, { encoding: 'utf8' }, (error, data) => {
779
+ if (error) return reject(error);
966
780
  try {
967
781
  return resolve(JSON.parse(data));
968
- } catch (e) {
969
- return reject(e);
782
+ } catch (error) {
783
+ return reject(error);
970
784
  }
971
785
  });
972
786
  });
@@ -977,18 +791,12 @@ function writeJson(path, data) {
977
791
  let dataString;
978
792
  try {
979
793
  dataString = JSON.stringify(data, null, 2) + '\n';
980
- } catch (err) {
981
- return reject(err);
794
+ } catch (error) {
795
+ return reject(error);
982
796
  }
983
- fs.writeFile(path, dataString, (err) => {
984
- if (err) return reject(err);
797
+ fs.writeFile(path, dataString, (error) => {
798
+ if (error) return reject(error);
985
799
  return resolve();
986
800
  });
987
801
  });
988
802
  }
989
-
990
- exports.prepareTsFile = prepareTsFile;
991
- exports.getCodeBlocks = getCodeBlocks;
992
- exports.checkTypes = checkTypes;
993
- exports.getTypes = getTypes;
994
- exports.handleConvertedOutput = handleConvertedOutput;
package/lib/utils.mjs ADDED
@@ -0,0 +1,197 @@
1
+ import fs from 'node:fs/promises';
2
+ import path from 'node:path';
3
+
4
// Fallback logger: routes output straight to the console when the caller does
// not inject a custom `logger` (every exported function here takes one as an
// optional trailing parameter).
const defaultLogger = {
  /* eslint-disable no-console */
  log: console.log,
  error: console.error,
  ok: console.info,
  /* eslint-enable no-console */
};
11
+
12
/**
 * Validates that every property type in a schema is either a built-in
 * primitive, a platform type, or one of the caller-supplied defined types.
 * Logs an error for the first unknown type found.
 *
 * @param {string[]} definedTypes - extra type names considered valid
 * @param {object} propertiesSchema - schema whose `properties` map is checked
 * @param {string} jsonPath - path shown in the error message
 * @param {{error: Function}} [logger] - sink for error output
 * @returns {boolean} true when all types are known, false otherwise
 */
export function checkTypes(definedTypes, propertiesSchema, jsonPath, logger = defaultLogger) {
  const builtinTypes = [
    'string',
    'undefined',
    'number',
    'boolean',
    'any',
    'object',
    'array',
    'integer',
    'Asset',
    'AssetType',
    'Flow',
    'Secret',
    'TimeSeries',
  ];
  const knownTypes = new Set([...definedTypes, ...builtinTypes]);

  // Reject on the first property whose declared type is not recognized.
  const properties = propertiesSchema.properties || {};
  for (const [, definition] of Object.entries(properties)) {
    const { type } = definition;
    if (type && !knownTypes.has(type)) {
      logger.error(
        `ERROR: unknown type ${type}.
        Please add a schema for this type in ${jsonPath}
        for more info check the documentation`,
      );
      return false;
    }
  }
  return true;
}
44
+
45
/**
 * Reads a JSON schema file (relative to the current working directory) and
 * returns the names of its `definitions`. Any failure — missing file,
 * unreadable content, malformed JSON — degrades silently to an empty list.
 *
 * @param {string} filePath - path relative to process.cwd()
 * @returns {Promise<string[]>} defined type names, or [] on any error
 */
export async function getTypes(filePath) {
  try {
    const raw = await fs.readFile(path.join(process.cwd(), filePath));
    const { definitions } = JSON.parse(raw);
    return definitions ? Object.keys(definitions) : [];
  } catch {
    return [];
  }
}
53
+
54
/**
 * Merges the converter's JSON output (`result`) into a flow-function manifest
 * (`json`): fills `propertiesSchema` (if absent), adds a `default` entry to
 * `inputStreams`/`outputStreams` (if absent), and collects any extra schemas
 * into `json.definitions`. The `required` array of each schema is folded into
 * per-property `required: true` flags.
 *
 * @param {string} result - raw converter output, expected to be JSON
 * @param {string} jsonPath - path of the type-definitions file (for getTypes/checkTypes)
 * @param {object} json - manifest to enrich; mutated and returned
 * @param {{error: Function}} [logger] - sink for error output
 * @returns {Promise<object>} the (possibly enriched) manifest
 */
export async function handleConvertedOutput(result, jsonPath, json, logger = defaultLogger) {
  let schema;
  try {
    schema = JSON.parse(result);
  } catch {
    // Converter emitted something that is not JSON (usually an error trace):
    // surface it verbatim and leave the manifest untouched.
    logger.error(result);
    return json;
  }

  const values = [
    ['propertiesSchema', 'Properties'],
    ['inputStreams', 'InputProperties'],
    ['outputStreams', 'OutputProperties'],
  ];

  // Hoisted out of the loop: the defined types depend only on jsonPath, so
  // reading the file once avoids three identical disk reads per call.
  const types = await getTypes(jsonPath);

  for (const value of values) {
    const propertiesSchema = schema[value[1]] || {};
    for (const requestProperty of propertiesSchema.required || []) {
      propertiesSchema.properties[requestProperty] = { ...propertiesSchema.properties[requestProperty], required: true };
    }
    // remove required field (its information now lives on each property)
    delete propertiesSchema.required;

    // Fix: forward the injected logger; previously checkTypes always fell
    // back to its console-based default even when a logger was supplied.
    checkTypes(types, propertiesSchema, jsonPath, logger);

    const completeSchema = {
      schema: {
        type: 'object',
        properties: {
          ...propertiesSchema.properties,
        },
      },
    };

    if (value[0] === 'propertiesSchema') {
      // Never overwrite a manifest's existing propertiesSchema.
      if (!json['propertiesSchema']) {
        json['propertiesSchema'] = completeSchema;
      }
    } else {
      // Only add the generated schema when no 'default' stream is configured yet.
      if (!json[value[0]].some((v) => v.name === 'default') && propertiesSchema) {
        json[value[0]].push({
          name: 'default',
          ...completeSchema,
        });
      }
    }
  }

  // Any top-level key that is not one of the three well-known property groups
  // is an additional type definition; normalize its `required` list the same way.
  if (Object.keys(schema).some((key) => !['Properties', 'InputProperties', 'OutputProperties'].includes(key))) {
    const typeDefinitions = Object.keys(schema).filter((key) => !['Properties', 'InputProperties', 'OutputProperties'].includes(key));
    json.definitions = typeDefinitions.reduce((previousValue, currentValue) => {
      const additionalSchema = schema[currentValue];
      for (const requestProperty of additionalSchema.required || []) {
        additionalSchema.properties[requestProperty] = { ...additionalSchema.properties[requestProperty], required: true };
      }
      delete additionalSchema.required;
      previousValue[currentValue] = additionalSchema;
      return previousValue;
    }, {});
  }
  return json;
}
119
+
120
/**
 * Wraps a flow-function TypeScript source so that, when executed (via ts-node),
 * it prints the JSON schemas generated from its class-validator metadata.
 *
 * Workaround included: if a class extends another and declares no fields of its
 * own, class-validator emits no metadata for it and no schema can be generated.
 * In that case the empty `OutputProperties` block is replaced with the
 * `InputProperties` block it inherits from (and the `extends` clause dropped).
 *
 * @param {string} file - original TypeScript source
 * @returns {string} executable source that logs the schemas as JSON
 */
export function prepareTsFile(file) {
  const codeBlocks = getCodeBlocks(file);
  const emptyExtendsBlock = codeBlocks.find((block) => blockDefinitionIncludes(block, 'extends') && isBlockEmpty(block));
  if (emptyExtendsBlock) {
    // replace block and remove extends
    let replBlock = `${emptyExtendsBlock}`;
    if (replBlock.replace(/\s\s+/g, ' ').trim().startsWith('class OutputProperties')) {
      // remove extends
      replBlock = replBlock.replace('extends InputProperties', '');
      // replace block with InputProperties block
      const inputPropertiesBlock = codeBlocks.find(
        (v) => blockDefinitionIncludes(v, 'InputProperties') && !blockDefinitionIncludes(v, 'OutputProperties'),
      );
      replBlock = replBlock.replace(getBlockContent(replBlock), getBlockContent(inputPropertiesBlock));

      file = file.replace(emptyExtendsBlock, replBlock);
    }
  }
  // BUG FIX: the import must bind the name `classTransformerMetadataStorage`
  // (the option key used in the shorthand property below). It was previously
  // aliased to `classTransformerDefaultMetadataStorage`, so the generated
  // script threw a ReferenceError at runtime.
  return (
    `import { validationMetadatasToSchemas as v } from 'class-validator-jsonschema';\n` +
    `import { defaultMetadataStorage as classTransformerMetadataStorage } from 'class-transformer/cjs/storage';\n` +
    `${file}\n` +
    `const s = v({\n
    additionalConverters: {\n
      UnitArgsValidator: (meta) => {\n
        return {\n
          measure: meta.constraints[0],\n
          unit: meta.constraints[1],\n
          type: 'number',\n
        };\n
      },\n
    },\n
    classTransformerMetadataStorage\n
  });\n` +
    `console.log(JSON.stringify(s));`
  );
}
159
+
160
/**
 * Splits a source string into its top-level brace-delimited blocks.
 * Each returned block starts at the newline preceding the opening brace
 * (so the definition line is included) and ends at the matching closing brace.
 *
 * @param {string} string_ - source text to scan
 * @returns {string[]} top-level blocks, in order of appearance
 */
export function getCodeBlocks(string_) {
  const blocks = [];
  let depth = 0;
  let blockStart = 0;
  let lastNewline = 0;
  let index = 0;
  for (const char of string_) {
    switch (char) {
      case '\n': {
        lastNewline = index;
        break;
      }
      case '{': {
        if (depth === 0) {
          // Opening brace of a new top-level block: anchor at the last newline.
          blockStart = lastNewline;
        }
        depth += 1;
        break;
      }
      case '}': {
        depth -= 1;
        if (depth === 0) {
          // Matching close of the top-level block: capture it.
          blocks.push(string_.slice(blockStart, index + 1));
        }
        break;
      }
    }
    index += 1;
  }
  return blocks;
}
185
+
186
/**
 * True when the block's definition (its first non-blank-trimmed line)
 * contains the given substring; later lines of the body are ignored.
 */
function blockDefinitionIncludes(block, value) {
  const [definitionLine] = block.trim().split('\n', 1);
  return definitionLine.includes(value);
}
189
+
190
/**
 * Returns the braced body of a block: everything from the first opening
 * brace to the last closing brace, both inclusive.
 */
function getBlockContent(block) {
  const open = block.indexOf('{');
  const close = block.lastIndexOf('}');
  return block.slice(open, close + 1);
}
193
+
194
/**
 * True when nothing but whitespace sits between the block's outermost braces.
 */
function isBlockEmpty(block) {
  const inner = block.slice(block.indexOf('{') + 1, block.lastIndexOf('}'));
  return inner.trim().length === 0;
}
197
+ }
package/package.json CHANGED
@@ -1,18 +1,19 @@
1
1
  {
2
2
  "name": "@hahnpro/flow-cli",
3
- "version": "2.11.0",
3
+ "version": "2.12.2",
4
4
  "description": "CLI for managing Flow Modules",
5
5
  "license": "MIT",
6
+ "type": "module",
6
7
  "author": {
7
8
  "name": "Hahn Projects GmbH",
8
9
  "url": "https://hahnpro.com"
9
10
  },
10
11
  "repository": {
11
12
  "type": "git",
12
- "url": "git@gitlab.com:hahnpro/flow.git"
13
+ "url": "git@github.com:hahnprojects/flow.git"
13
14
  },
14
15
  "bin": {
15
- "flow": "lib/cli.js"
16
+ "flow": "lib/cli.mjs"
16
17
  },
17
18
  "directories": {
18
19
  "lib": "lib",
@@ -26,32 +27,53 @@
26
27
  },
27
28
  "dependencies": {
28
29
  "archiver": "^5.3.0",
29
- "axios": "^0.23.0",
30
- "chalk": "^4.1.2",
31
- "class-transformer": "0.4.0",
32
- "class-validator": "~0.13.1",
30
+ "axios": "^0.26.0",
31
+ "chalk": "^5.0.0",
32
+ "class-transformer": "0.5.1",
33
+ "class-validator": "~0.13.2",
33
34
  "class-validator-jsonschema": "^3.1.0",
34
- "commander": "^8.2.0",
35
+ "commander": "^9.0.0",
35
36
  "copyfiles": "^2.4.1",
36
- "dotenv": "^10.0.0",
37
- "execa": "^5.1.1",
37
+ "dotenv": "^16.0.0",
38
+ "execa": "^6.1.0",
38
39
  "form-data": "^4.0.0",
39
40
  "glob": "^7.2.0",
40
41
  "https-proxy-agent": "^5.0.0",
41
- "ora": "^5.4.1",
42
+ "ora": "^6.1.0",
42
43
  "reflect-metadata": "^0.1.13",
43
- "ts-node": "^10.3.0"
44
+ "ts-node": "^10.5.0"
44
45
  },
45
46
  "devDependencies": {
46
- "@types/jest": "^27.0.2",
47
- "@types/node": "^14.17.22",
48
- "jest": "^27.2.5",
49
- "prettier": "^2.4.1",
50
- "ts-jest": "^27.0.5",
51
- "typescript": "^4.4.4"
47
+ "@types/jest": "^27.4.1",
48
+ "@types/node": "^16.11.25",
49
+ "eslint": "^8.9.0",
50
+ "eslint-plugin-unicorn": "^41.0.0",
51
+ "jest": "^27.5.1",
52
+ "prettier": "^2.5.1",
53
+ "typescript": "^4.5.5"
52
54
  },
53
55
  "engines": {
54
- "node": ">=v14.13"
56
+ "node": "^14.13.1 || >=16.0.0"
55
57
  },
56
- "scripts": {}
58
+ "eslintConfig": {
59
+ "extends": [
60
+ "eslint:recommended",
61
+ "prettier",
62
+ "plugin:prettier/recommended",
63
+ "plugin:unicorn/all"
64
+ ],
65
+ "rules": {
66
+ "unicorn/no-array-reduce": "off",
67
+ "unicorn/no-null": "off",
68
+ "unicorn/prefer-object-from-entries": "off",
69
+ "no-async-promise-executor": "off",
70
+ "no-console": "error"
71
+ }
72
+ },
73
+ "scripts": {
74
+ "format": "prettier --write .",
75
+ "lint": "eslint '*/**/*.{js,mjs}'",
76
+ "test": "jest"
77
+ },
78
+ "readme": "# `@hahnpro/flow-cli`\n\nhttps://github.com/hahnprojects/flow\n\n# Commands\n\n## `build [projectName]`\n\nBuilds specified Project.\n\n## `install [projectName]`\n\nInstalls the dependencies of the specified Project.\n\n## `format`\n\nFormats all typescript files according to prettier configuration.\n\n## `name [projectName]`\n\nInstalls Dependencies and Builds the specified Project.\n\n## `package [projectName]`\n\nBuilds specified Module and packages it as .zip File for manual upload to the platform.\n\n## `publish-module [projectName]`\n\nPublishes specified Module to Cloud Platform.\n\n- `-f`, `--functions` Publish flow functions.\n- `-u`, `--update` Update existing flow functions.\n\n## `publish-functions [projectName]`\n\nPublishes all Flow Functions inside specified Module to Cloud Platform.\n\n- `-u`, `--update` Update existing flow functions.\n\n## `serve [projectName]`\n\nBuilds and serves your Project. Rebuilding on file changes.\n\n## `start [projectName]`\n\nRuns your project.\n\n## `test [projectName]`\n\nRuns tests for your Project.\n\n## `generate-schemas [projectName]`\n\nGenerates Input, Output and Properties-Schemas for the specified project.\n\n- `--verbose` Output more information about what is being done.\n- `-h`, `--hide` Hide warnings if Input/OutputProperties classes can´t be found.\n This command generates the schemas and puts them in the `inputStreams` and `outputStreams`\n fields in the json-files of each Flow-Function. It always assumes the properties defined\n in the `Input/OutPutProperties` classes are meant for the default input/output streams.\n If your Function uses different streams you may have to change stream name manually.\n"
57
79
  }