@sw-tsdk/plugin-connector 3.13.1 → 3.13.2-next.3dfd44a

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74)
  1. package/README.md +18 -18
  2. package/lib/commands/connector/build.js +168 -44
  3. package/lib/commands/connector/build.js.map +1 -1
  4. package/lib/commands/connector/sign.js +108 -12
  5. package/lib/commands/connector/sign.js.map +1 -1
  6. package/lib/commands/connector/validate.js +110 -10
  7. package/lib/commands/connector/validate.js.map +1 -1
  8. package/lib/commands/migrator/convert.d.ts +3 -0
  9. package/lib/commands/migrator/convert.js +201 -20
  10. package/lib/commands/migrator/convert.js.map +1 -1
  11. package/lib/templates/migrator-runners/plugin_override.txt +76 -4
  12. package/lib/templates/migrator-runners/runner_override.txt +30 -0
  13. package/lib/templates/migrator-runners/script_override.txt +77 -5
  14. package/lib/templates/swimlane/__init__.py +18 -0
  15. package/lib/templates/swimlane/core/__init__.py +0 -0
  16. package/lib/templates/swimlane/core/adapters/__init__.py +10 -0
  17. package/lib/templates/swimlane/core/adapters/app.py +59 -0
  18. package/lib/templates/swimlane/core/adapters/app_revision.py +49 -0
  19. package/lib/templates/swimlane/core/adapters/helper.py +84 -0
  20. package/lib/templates/swimlane/core/adapters/record.py +468 -0
  21. package/lib/templates/swimlane/core/adapters/record_revision.py +43 -0
  22. package/lib/templates/swimlane/core/adapters/report.py +65 -0
  23. package/lib/templates/swimlane/core/adapters/task.py +58 -0
  24. package/lib/templates/swimlane/core/adapters/usergroup.py +183 -0
  25. package/lib/templates/swimlane/core/bulk.py +48 -0
  26. package/lib/templates/swimlane/core/cache.py +165 -0
  27. package/lib/templates/swimlane/core/client.py +466 -0
  28. package/lib/templates/swimlane/core/cursor.py +100 -0
  29. package/lib/templates/swimlane/core/fields/__init__.py +46 -0
  30. package/lib/templates/swimlane/core/fields/attachment.py +82 -0
  31. package/lib/templates/swimlane/core/fields/base/__init__.py +15 -0
  32. package/lib/templates/swimlane/core/fields/base/cursor.py +90 -0
  33. package/lib/templates/swimlane/core/fields/base/field.py +149 -0
  34. package/lib/templates/swimlane/core/fields/base/multiselect.py +116 -0
  35. package/lib/templates/swimlane/core/fields/comment.py +48 -0
  36. package/lib/templates/swimlane/core/fields/datetime.py +112 -0
  37. package/lib/templates/swimlane/core/fields/history.py +28 -0
  38. package/lib/templates/swimlane/core/fields/list.py +266 -0
  39. package/lib/templates/swimlane/core/fields/number.py +38 -0
  40. package/lib/templates/swimlane/core/fields/reference.py +169 -0
  41. package/lib/templates/swimlane/core/fields/text.py +30 -0
  42. package/lib/templates/swimlane/core/fields/tracking.py +10 -0
  43. package/lib/templates/swimlane/core/fields/usergroup.py +137 -0
  44. package/lib/templates/swimlane/core/fields/valueslist.py +70 -0
  45. package/lib/templates/swimlane/core/resolver.py +46 -0
  46. package/lib/templates/swimlane/core/resources/__init__.py +0 -0
  47. package/lib/templates/swimlane/core/resources/app.py +136 -0
  48. package/lib/templates/swimlane/core/resources/app_revision.py +43 -0
  49. package/lib/templates/swimlane/core/resources/attachment.py +64 -0
  50. package/lib/templates/swimlane/core/resources/base.py +55 -0
  51. package/lib/templates/swimlane/core/resources/comment.py +33 -0
  52. package/lib/templates/swimlane/core/resources/record.py +499 -0
  53. package/lib/templates/swimlane/core/resources/record_revision.py +44 -0
  54. package/lib/templates/swimlane/core/resources/report.py +259 -0
  55. package/lib/templates/swimlane/core/resources/revision_base.py +69 -0
  56. package/lib/templates/swimlane/core/resources/task.py +16 -0
  57. package/lib/templates/swimlane/core/resources/usergroup.py +166 -0
  58. package/lib/templates/swimlane/core/search.py +31 -0
  59. package/lib/templates/swimlane/core/wrappedsession.py +12 -0
  60. package/lib/templates/swimlane/exceptions.py +191 -0
  61. package/lib/templates/swimlane/utils/__init__.py +132 -0
  62. package/lib/templates/swimlane/utils/date_validator.py +4 -0
  63. package/lib/templates/swimlane/utils/list_validator.py +7 -0
  64. package/lib/templates/swimlane/utils/str_validator.py +10 -0
  65. package/lib/templates/swimlane/utils/version.py +101 -0
  66. package/lib/transformers/base-transformer.js +61 -14
  67. package/lib/transformers/base-transformer.js.map +1 -1
  68. package/lib/transformers/connector-generator.d.ts +104 -2
  69. package/lib/transformers/connector-generator.js +1234 -51
  70. package/lib/transformers/connector-generator.js.map +1 -1
  71. package/lib/types/migrator-types.d.ts +22 -0
  72. package/lib/types/migrator-types.js.map +1 -1
  73. package/oclif.manifest.json +1 -1
  74. package/package.json +6 -6
@@ -4,19 +4,261 @@ exports.ConnectorGenerator = void 0;
4
4
  const tslib_1 = require("tslib");
5
5
  const connector_interfaces_1 = require("@swimlane/connector-interfaces");
6
6
  const node_fs_1 = require("node:fs");
7
+ const promises_1 = require("node:fs/promises");
7
8
  const node_path_1 = require("node:path");
9
+ const node_os_1 = require("node:os");
8
10
  const js_yaml_1 = tslib_1.__importDefault(require("js-yaml"));
9
11
  const adm_zip_1 = tslib_1.__importDefault(require("adm-zip"));
12
/**
 * Map asset.json inputParameter type (numeric) to JSON Schema type.
 * 1: text, 2: text area, 3: code, 4: password, 5: list, 6: number, 7: boolean
 * @param {number} typeCode - numeric inputParameter type from asset.json
 * @returns {string} JSON Schema type name ('array' | 'number' | 'boolean' | 'string')
 */
function assetInputTypeToSchemaType(typeCode) {
    // Only list/number/boolean map to non-string schema types; every other
    // code (text, text area, code, password, unknown) is a plain string.
    const nonStringTypes = new Map([
        [5, 'array'],
        [6, 'number'],
        [7, 'boolean'],
    ]);
    return nonStringTypes.get(typeCode) ?? 'string';
}
32
/**
 * Map applicationInfo field type (and selectionType for valuesList) to JSON Schema type.
 * reference, attachment, list, multi-select, checkbox -> array
 * text, single select -> string
 * numeric -> number
 * @param {{fieldType?: string, selectionType?: string}} field - field record from applicationInfo
 * @returns {string} JSON Schema type name
 */
function applicationFieldTypeToSchemaType(field) {
    const fieldType = (field.fieldType || '').toLowerCase();
    switch (fieldType) {
        case 'numeric':
            return 'number';
        case 'reference':
        case 'attachment':
        case 'list':
        case 'checkbox':
            return 'array';
        case 'valueslist': {
            // Values lists are arrays only when multi-selection is enabled.
            const selection = (field.selectionType || '').toLowerCase();
            return selection === 'multi' ? 'array' : 'string';
        }
        default:
            return 'string';
    }
}
52
/**
 * Packages that must never be written to requirements.txt:
 * provided by the runtime (requests, swimlane) or installed through
 * other means (swimbundle_utils via compile.sh).
 */
const EXCLUDED_PACKAGES = new Set([
    'requests', 'swimlane', 'swimbundle_utils',
    'dominions', 'ssdeep', 'pymongo',
    'poplib', // no viable PyPI package
]);
/** Subset of the excluded packages that runner.sh must install instead (apt/pip). */
const RUNNER_EXCLUDED_PACKAGES = new Set(['ssdeep']);
68
/**
 * Common import-module to PyPI package mapping.
 * Import names do not always match the installable package name.
 *
 * NOTE: keys must be unique — in a Map constructor a later duplicate key
 * silently overwrites an earlier one. The previous duplicate 'dateutil'
 * entry ('python_dateutil') was dead code; only the effective value
 * ('python-dateutil') is kept.
 */
const IMPORT_TO_PYPI_PACKAGE = new Map([
    ['bs4', 'beautifulsoup4'],
    ['cv2', 'opencv-python'],
    ['crypto', 'pycryptodome'],
    ['cryptodome', 'pycryptodome'],
    ['dateutil', 'python-dateutil'],
    ['dns', 'dnspython'],
    ['fitz', 'pymupdf'],
    ['googleapiclient', 'google-api-python-client'],
    ['jwt', 'pyjwt'],
    ['pil', 'pillow'],
    ['sklearn', 'scikit-learn'],
    ['yaml', 'pyyaml'],
    ['magic', 'python_magic'],
    ['mailparser', 'mail-parser'],
    ['falconpy', 'crowdstrike-falconpy'],
    ['azure', 'azure-storage-blob'],
]);
/**
 * Resolve the installable PyPI package name for a Python import name.
 * @param {string} importName - module name as it appears in an import statement
 * @returns {string} mapped PyPI name, the normalized (trimmed, lowercased)
 *   import name when no mapping exists, or '' for blank input
 */
function resolvePypiPackageFromImport(importName) {
    const normalizedImport = importName.trim().toLowerCase();
    if (!normalizedImport)
        return '';
    return IMPORT_TO_PYPI_PACKAGE.get(normalizedImport) ?? normalizedImport;
}
97
/**
 * Packages whose version constraints are stripped from requirements.txt.
 *
 * Pins carried over from the Python 3.7 era frequently lack wheels for
 * Python 3.11+, so removing the pin lets pip resolve a compatible release
 * automatically.
 *
 * IMPORTANT: every entry MUST be lowercase — deduplicateRequirements()
 * lowercases package names before consulting this set.
 */
const PACKAGES_TO_STRIP_VERSION = new Set([
    // Core scientific/data packages with C extensions
    'numpy', 'scipy', 'pandas',
    // Cryptography packages with C/Rust extensions
    'cryptography', 'cffi', 'pycryptodome', 'pycryptodomex',
    // NLP/ML packages (spacy ecosystem)
    'spacy', 'thinc', 'blis', 'cymem', 'preshed', 'murmurhash', 'srsly',
    // Other packages with C extensions or version-specific wheels
    'regex', 'lxml', 'pillow', 'psycopg2', 'psycopg2-binary', 'grpcio',
    'protobuf', 'pyzmq', 'greenlet', 'gevent', 'markupsafe', 'pyyaml',
    'ruamel.yaml', 'msgpack', 'ujson', 'orjson',
    // Packages that may have compatibility issues
    'typed-ast', 'dataclasses', 'importlib-metadata', 'importlib_metadata',
    'zipp', 'typing-extensions', 'typing_extensions', 'python_magic', 'pgpy',
    'aiohttp', 'yarl', 'frozenlist', 'geoip2', 'extract_msg', 'click',
    'ioc_finder', 'tzlocal', 'pydantic',
    // Swimlane packages
    'datetime_parser', 'email_master', 'sw_aqueduct',
]);
10
165
  class ConnectorGenerator {
166
+ static truncateActionDescription(description) {
167
+ if (description.length <= this.ACTION_DESCRIPTION_MAX_LENGTH)
168
+ return description;
169
+ return description.slice(0, this.ACTION_DESCRIPTION_MAX_LENGTH);
170
+ }
171
+ /**
172
+ * For inputs with Type "record", resolve ValueType from applicationInfo.fields (field id = input.Value).
173
+ * Call after transform() so action YAML and temp_inputs get correct types.
174
+ */
175
+ static patchRecordInputTypes(transformationResult, applicationInfo) {
176
+ if (!applicationInfo?.fields?.length)
177
+ return;
178
+ const fieldById = new Map(applicationInfo.fields.filter(f => f.id).map(f => [f.id, f]));
179
+ for (const input of transformationResult.inputs) {
180
+ const typeRaw = input.Type ?? input.type;
181
+ if (String(typeRaw).toLowerCase() !== 'record')
182
+ continue;
183
+ const fieldId = input.Value ?? input.value;
184
+ if (typeof fieldId !== 'string')
185
+ continue;
186
+ const field = fieldById.get(fieldId);
187
+ if (field) {
188
+ input.ValueType = applicationFieldTypeToSchemaType(field);
189
+ if (input.ValueType === 'array') {
190
+ input.arrayItemType = (field.fieldType || '').toLowerCase();
191
+ input.arrayItemValueType = (field.inputType || '').toLowerCase();
192
+ }
193
+ }
194
+ }
195
+ }
196
+ /**
197
+ * For outputs that map to date fields (by Value = field id in applicationInfo), compute which keys need
198
+ * conversion to ISO8601 and the timetype/format for each. Sets transformationResult.outputDateConversions.
199
+ * Uses the application denoted by each output block's ApplicationId (same level as Mappings) when present,
200
+ * otherwise the current application (taskApplicationId).
201
+ * - DataFormat "Standard" -> no conversion.
202
+ * - DataFormat "Unix EPOCH" -> timetype = UnixEpochUnit (seconds or milliseconds).
203
+ * - DataFormat "custom" -> timetype = customDataFormat.
204
+ * - Otherwise -> timetype = DataFormat (treat as custom format string).
205
+ */
206
+ static patchOutputDateConversions(transformationResult, applicationInfoMap, currentApplicationId) {
207
+ transformationResult.outputDateConversions = [];
208
+ if (!applicationInfoMap || !transformationResult.outputs?.length)
209
+ return;
210
+ const keyToTimetype = new Map();
211
+ for (const output of transformationResult.outputs) {
212
+ const outputBlockAppId = output.ApplicationId ?? output.applicationId;
213
+ const appId = typeof outputBlockAppId === 'string' ? outputBlockAppId : transformationResult.taskApplicationId ?? currentApplicationId;
214
+ if (typeof appId !== 'string')
215
+ continue;
216
+ const applicationInfo = applicationInfoMap[appId];
217
+ if (!applicationInfo?.fields?.length)
218
+ continue;
219
+ const fieldById = new Map(applicationInfo.fields.filter((f) => f.id).map((f) => [f.id, f]));
220
+ const fieldId = output.Value ?? output.value;
221
+ if (typeof fieldId !== 'string')
222
+ continue;
223
+ const field = fieldById.get(fieldId);
224
+ const fieldType = (field?.fieldType ?? '').toLowerCase();
225
+ if (fieldType !== 'date')
226
+ continue;
227
+ const dataFormat = String(output.DataFormat ?? output.dataFormat ?? '').trim();
228
+ if (dataFormat === '' || dataFormat.toLowerCase() === 'standard')
229
+ continue;
230
+ let timetype;
231
+ if (dataFormat.toLowerCase() === 'unix epoch') {
232
+ timetype = String(output.UnixEpochUnit ?? output.unixEpochUnit ?? 'seconds').toLowerCase();
233
+ }
234
+ else if (dataFormat.toLowerCase() === 'custom') {
235
+ timetype = String(output.customDataFormat ?? output.CustomDataFormat ?? '').trim();
236
+ if (!timetype)
237
+ continue;
238
+ }
239
+ else {
240
+ timetype = dataFormat;
241
+ }
242
+ const key = output.Key;
243
+ if (!keyToTimetype.has(key))
244
+ keyToTimetype.set(key, timetype);
245
+ }
246
+ transformationResult.outputDateConversions = [...keyToTimetype.entries()].map(([key, timetype]) => ({ key, timetype }));
247
+ }
248
    /**
     * Initializes a forked plugin (copy base code, requirements, asset).
     * Returns the set of runner-excluded package names that were skipped (e.g. ssdeep) so runner.sh can be updated.
     * @param {{forkedName: string}} transformedExport - only forkedName is read here
     * @param {string} fromDirectory - directory containing the extracted source packages
     * @param {string} toDirectory - destination connector directory
     * @returns {Promise<Set<string>>} runner-excluded package names skipped from requirements.txt
     */
    static async initializeForkedPlugin(transformedExport, fromDirectory, toDirectory) {
        const { forkedName } = transformedExport;
        console.log(`Initializing forked plugin: ${forkedName}`);
        // Awaited before the parallel step so its return value (the runner-excluded
        // package set) can be captured and returned to the caller.
        const excludedRunnerPackages = await this.generateRequirements(fromDirectory, toDirectory, forkedName);
        await Promise.all([
            this.createBaseCode(fromDirectory, toDirectory, forkedName),
            this.generateAsset(fromDirectory, toDirectory, forkedName),
        ]);
        console.log(`Forked plugin initialized: ${forkedName}`);
        return excludedRunnerPackages;
    }
21
263
  static async generateLogo(toDirectory) {
22
264
  const templatePath = (0, node_path_1.join)(__dirname, '../templates/migrator-runners/image.png');
@@ -26,13 +268,37 @@ class ConnectorGenerator {
26
268
  }
27
269
  static async generateBaseStructure(toDirectory) {
28
270
  await node_fs_1.promises.mkdir((0, node_path_1.join)(toDirectory, 'connector', 'config', 'actions'), { recursive: true });
29
- await node_fs_1.promises.mkdir((0, node_path_1.join)(toDirectory, 'image'));
30
- await node_fs_1.promises.mkdir((0, node_path_1.join)(toDirectory, 'docs'));
31
- await node_fs_1.promises.mkdir((0, node_path_1.join)(toDirectory, 'data'));
32
- await node_fs_1.promises.mkdir((0, node_path_1.join)(toDirectory, 'doc_images'));
33
- await node_fs_1.promises.mkdir((0, node_path_1.join)(toDirectory, 'connector', 'config', 'assets'));
34
- await node_fs_1.promises.mkdir((0, node_path_1.join)(toDirectory, 'connector', 'src'));
35
- await this.createFile((0, node_path_1.join)(toDirectory, 'requirements.txt'), '');
271
+ await node_fs_1.promises.mkdir((0, node_path_1.join)(toDirectory, 'image'), { recursive: true });
272
+ await node_fs_1.promises.mkdir((0, node_path_1.join)(toDirectory, 'docs'), { recursive: true });
273
+ await node_fs_1.promises.mkdir((0, node_path_1.join)(toDirectory, 'data'), { recursive: true });
274
+ await node_fs_1.promises.mkdir((0, node_path_1.join)(toDirectory, 'doc_images'), { recursive: true });
275
+ await node_fs_1.promises.mkdir((0, node_path_1.join)(toDirectory, 'connector', 'config', 'assets'), { recursive: true });
276
+ await node_fs_1.promises.mkdir((0, node_path_1.join)(toDirectory, 'connector', 'src'), { recursive: true });
277
+ // Write default requirements
278
+ const defaultRequirements = [
279
+ 'cachetools>=4.2.4',
280
+ 'certifi==2024.7.4',
281
+ 'pendulum==3.0.0',
282
+ 'pyjwt>=2.4.0',
283
+ 'pyuri>=0.3,<0.4',
284
+ // 'requests[security]>=2,<3',
285
+ 'six>=1.12.0',
286
+ 'sortedcontainers==2.4.0',
287
+ 'shortid==0.1.2',
288
+ 'beautifulsoup4',
289
+ 'pandas',
290
+ ].join('\n') + '\n';
291
+ await this.createFile((0, node_path_1.join)(toDirectory, 'requirements.txt'), defaultRequirements);
292
+ // Copy swimlane template to connector/swimlane
293
+ try {
294
+ const swimlaneTemplatePath = (0, node_path_1.join)(__dirname, '../templates/swimlane');
295
+ const swimlaneDestinationPath = (0, node_path_1.join)(toDirectory, 'connector', 'swimlane');
296
+ await this.copyDirectoryRecursive(swimlaneTemplatePath, swimlaneDestinationPath);
297
+ }
298
+ catch (error) {
299
+ console.error(`Error copying swimlane template: ${error}`);
300
+ // Continue even if swimlane template copy fails
301
+ }
36
302
  await this.createFile((0, node_path_1.join)(toDirectory, 'docs', 'CHANGELOG.md'), '');
37
303
  await this.createFile((0, node_path_1.join)(toDirectory, 'docs', 'README.md'), '# Example Readme');
38
304
  await this.createFile((0, node_path_1.join)(toDirectory, 'docs', 'EXTERNAL_README.md'), '# Example External Readme');
@@ -52,18 +318,25 @@ class ConnectorGenerator {
52
318
  }
53
319
    /**
     * Generates the Python action source file for one transformed task export.
     * Writes connector/src/<exportUid>.py; on transform errors the file body
     * is the error text instead of generated code.
     * @param {object} transformedExport - transformed task; reads type, error, script, inputs, exportUid, outputDateConversions
     * @param {string} toDirectory - destination connector directory
     */
    static async generateAction(transformedExport, toDirectory) {
        let content;
        const outputDateConversions = transformedExport.outputDateConversions;
        if (transformedExport.type === 'script') {
            content = transformedExport.error ? `Error: ${transformedExport.error}` : await this.getActionContentScript(transformedExport.script, transformedExport.inputs, outputDateConversions);
        }
        else {
            // Any non-script export is rendered through the forked-plugin template.
            content = transformedExport.error ? `Error: ${transformedExport.error}` : await this.getActionContentFork(transformedExport.script, transformedExport.inputs, outputDateConversions);
        }
        // Post-processing step applied to all content, including error placeholders.
        // NOTE(review): replaceTaskExecuteRequestCall is defined elsewhere in this
        // file — its exact rewrite is not visible here; confirm behavior there.
        content = this.replaceTaskExecuteRequestCall(content);
        const outputPath = (0, node_path_1.join)(toDirectory, 'connector', 'src', `${transformedExport.exportUid}.py`);
        await this.createFile(outputPath, content);
    }
64
332
  static async generateAsset(fromDirectory, toDirectory, packageName) {
65
333
  const assetPath = (0, node_path_1.join)(fromDirectory, 'packages', packageName, 'imports', 'asset.json');
66
- const destinationPath = (0, node_path_1.join)(toDirectory, 'connector', 'config', 'assets', 'asset.yaml');
334
+ // Generate unique asset filename and name based on package name to avoid overwrites
335
+ // e.g., sw_google_gsuite -> google_gsuite_asset.yaml with name: google_gsuite_asset
336
+ const assetNameBase = packageName.replace(/^sw_/, '').toLowerCase();
337
+ const assetFileName = `${assetNameBase}_asset.yaml`;
338
+ const assetName = `${assetNameBase}_asset`;
339
+ const destinationPath = (0, node_path_1.join)(toDirectory, 'connector', 'config', 'assets', assetFileName);
67
340
  try {
68
341
  await node_fs_1.promises.access(assetPath);
69
342
  const assetContent = await node_fs_1.promises.readFile(assetPath, 'utf8');
@@ -72,17 +345,15 @@ class ConnectorGenerator {
72
345
  const requiredInputs = [];
73
346
  for (const [key, rawValue] of Object.entries(assetJson.inputParameters || {})) {
74
347
  const value = rawValue;
348
+ const schemaType = assetInputTypeToSchemaType(value.type);
75
349
  inputProperties[key] = {
76
350
  title: value.name || key,
77
351
  description: value.description || '',
78
- type: 'string',
352
+ type: schemaType,
79
353
  };
80
354
  if (value.type === 4) {
81
355
  inputProperties[key].format = 'password';
82
356
  }
83
- if (value.example !== undefined) {
84
- inputProperties[key].example = value.example;
85
- }
86
357
  if (value.default !== undefined) {
87
358
  inputProperties[key].default = value.default;
88
359
  }
@@ -90,22 +361,163 @@ class ConnectorGenerator {
90
361
  requiredInputs.push(key);
91
362
  }
92
363
  }
93
- const assetYaml = js_yaml_1.default.dump({
364
+ const assetData = {
94
365
  schema: 'asset/1',
95
- name: 'asset',
96
- title: assetJson.name,
97
- description: assetJson.description,
366
+ name: assetName,
367
+ title: assetJson.name || 'Asset',
368
+ description: assetJson.description || '',
98
369
  inputs: {
99
370
  type: 'object',
100
371
  properties: inputProperties,
101
372
  required: requiredInputs.length > 0 ? requiredInputs : undefined,
102
373
  },
103
374
  meta: {},
104
- });
105
- await node_fs_1.promises.writeFile(destinationPath, assetYaml, 'utf8');
375
+ };
376
+ // Ensure title is always a non-empty string and truncate to 50 characters
377
+ if (!assetData.title || typeof assetData.title !== 'string' || assetData.title.trim() === '') {
378
+ assetData.title = 'Asset';
379
+ }
380
+ else {
381
+ assetData.title = assetData.title.trim();
382
+ if (assetData.title.length > 50) {
383
+ assetData.title = assetData.title.slice(0, 50);
384
+ }
385
+ }
386
+ const assetYaml = js_yaml_1.default.dump(assetData, { lineWidth: -1, noRefs: true });
387
+ // Validate the YAML can be parsed back and has required fields
388
+ let parsed;
389
+ try {
390
+ parsed = js_yaml_1.default.load(assetYaml);
391
+ if (!parsed || !parsed.title) {
392
+ throw new Error('Generated asset YAML is missing required title field');
393
+ }
394
+ }
395
+ catch (parseError) {
396
+ console.error(`Generated invalid asset YAML for ${packageName}:`, parseError);
397
+ console.error('Asset data:', JSON.stringify(assetData, null, 2));
398
+ throw new Error(`Failed to generate valid asset YAML for ${packageName}: ${parseError}`);
399
+ }
400
+ // Write atomically using a temporary file to prevent corruption
401
+ const dir = (0, node_path_1.join)(destinationPath, '..');
402
+ await node_fs_1.promises.mkdir(dir, { recursive: true });
403
+ // Write to a temporary file first, then rename (atomic operation)
404
+ const tempFile = (0, node_path_1.join)((0, node_os_1.tmpdir)(), `${assetName}-${Date.now()}-${Math.random().toString(36).slice(7)}.yaml`);
405
+ try {
406
+ await node_fs_1.promises.writeFile(tempFile, assetYaml, 'utf8');
407
+ // Atomically move the temp file to the final location
408
+ await node_fs_1.promises.rename(tempFile, destinationPath);
409
+ console.log(`Generated asset file: ${assetFileName} (name: ${assetName}) for package: ${packageName}`);
410
+ }
411
+ catch (writeError) {
412
+ // Clean up temp file if it exists
413
+ await node_fs_1.promises.unlink(tempFile).catch(() => { });
414
+ throw writeError;
415
+ }
416
+ }
417
+ catch (error) {
418
+ console.error(`Error generating asset ${assetFileName} for ${packageName}:`, error);
419
+ }
420
+ }
421
+ /**
422
+ * Adds asset parameters from task input mappings to a separate input asset file.
423
+ * Creates input_asset.yaml to avoid overwriting existing asset.yaml from forked plugins.
424
+ */
425
+ static async addAssetParameters(toDirectory, assetParameters, applicationName) {
426
+ if (assetParameters.length === 0) {
427
+ return;
428
+ }
429
+ const destinationPath = (0, node_path_1.join)(toDirectory, 'connector', 'config', 'assets', 'input_asset.yaml');
430
+ try {
431
+ // Create title and truncate to 50 characters
432
+ const fullTitle = applicationName ? `${applicationName} Key Store` : 'Input Asset';
433
+ const assetTitle = fullTitle.length > 50 ? fullTitle.slice(0, 50) : fullTitle;
434
+ let assetData = {
435
+ schema: 'asset/1',
436
+ name: 'input_asset',
437
+ title: assetTitle,
438
+ description: '',
439
+ inputs: {
440
+ type: 'object',
441
+ properties: {},
442
+ required: [],
443
+ },
444
+ meta: {},
445
+ };
446
+ // Try to read existing input_asset.yaml (in case we're adding more parameters)
447
+ try {
448
+ const existingContent = await node_fs_1.promises.readFile(destinationPath, 'utf8');
449
+ assetData = js_yaml_1.default.load(existingContent);
450
+ // Update title if application name is provided and truncate to 50 characters
451
+ if (applicationName) {
452
+ assetData.title = assetTitle;
453
+ }
454
+ else if (!assetData.title || assetData.title.trim() === '') {
455
+ assetData.title = 'Input Asset';
456
+ }
457
+ else {
458
+ // Ensure existing title is also truncated
459
+ assetData.title = assetData.title.trim();
460
+ if (assetData.title.length > 50) {
461
+ assetData.title = assetData.title.slice(0, 50);
462
+ }
463
+ }
464
+ // Ensure structure exists
465
+ if (!assetData.inputs) {
466
+ assetData.inputs = { type: 'object', properties: {}, required: [] };
467
+ }
468
+ if (!assetData.inputs.properties) {
469
+ assetData.inputs.properties = {};
470
+ }
471
+ if (!assetData.inputs.required) {
472
+ assetData.inputs.required = [];
473
+ }
474
+ }
475
+ catch {
476
+ // Asset doesn't exist, use default structure
477
+ // Ensure directory exists
478
+ await node_fs_1.promises.mkdir((0, node_path_1.join)(toDirectory, 'connector', 'config', 'assets'), { recursive: true });
479
+ }
480
+ // Add asset parameters from tasks
481
+ for (const param of assetParameters) {
482
+ const key = param.Key;
483
+ if (!assetData.inputs.properties[key]) {
484
+ assetData.inputs.properties[key] = {
485
+ title: key,
486
+ type: 'string',
487
+ };
488
+ // If it's a credentials type, mark as password format
489
+ if (param.Type === 'credentials' || param.Type === 'asset') {
490
+ assetData.inputs.properties[key].format = 'password';
491
+ }
492
+ // Add example if provided
493
+ if (param.Example !== undefined && param.Example !== null) {
494
+ assetData.inputs.properties[key].examples = Array.isArray(param.Example) ? param.Example : [param.Example];
495
+ }
496
+ }
497
+ }
498
+ // Title is already set above, just ensure it's truncated if needed
499
+ if (assetData.title && assetData.title.length > 50) {
500
+ assetData.title = assetData.title.slice(0, 50);
501
+ }
502
+ // Write atomically using a temporary file to prevent corruption
503
+ const dir = (0, node_path_1.join)(destinationPath, '..');
504
+ await node_fs_1.promises.mkdir(dir, { recursive: true });
505
+ const assetYaml = js_yaml_1.default.dump(assetData, { lineWidth: -1, noRefs: true });
506
+ // Write to a temporary file first, then rename (atomic operation)
507
+ const tempFile = (0, node_path_1.join)((0, node_os_1.tmpdir)(), `input_asset-${Date.now()}-${Math.random().toString(36).slice(7)}.yaml`);
508
+ try {
509
+ await node_fs_1.promises.writeFile(tempFile, assetYaml, 'utf8');
510
+ // Atomically move the temp file to the final location
511
+ await node_fs_1.promises.rename(tempFile, destinationPath);
512
+ }
513
+ catch (writeError) {
514
+ // Clean up temp file if it exists
515
+ await node_fs_1.promises.unlink(tempFile).catch(() => { });
516
+ throw writeError;
517
+ }
106
518
  }
107
519
  catch (error) {
108
- console.error(`Error generating asset for ${packageName}:`, error);
520
+ console.error('Error adding asset parameters:', error);
109
521
  }
110
522
  }
111
523
  static async extractZip(fromDirectory, packageName) {
@@ -124,7 +536,254 @@ class ConnectorGenerator {
124
536
  return null;
125
537
  }
126
538
  }
539
+ static async addExtractedImportsToRequirements(toDirectory, taskImports) {
540
+ const requirementsPath = (0, node_path_1.join)(toDirectory, 'requirements.txt');
541
+ if (taskImports.size === 0) {
542
+ return;
543
+ }
544
+ try {
545
+ // Filter out standard library modules that don't need to be in requirements.txt
546
+ const standardLibraryModules = new Set([
547
+ 'smtplib',
548
+ 'textwrap',
549
+ 'mimetypes',
550
+ 'hmac',
551
+ 'hashlib',
552
+ 'json',
553
+ 'os',
554
+ 'sys',
555
+ 'time',
556
+ 'datetime',
557
+ 're',
558
+ 'math',
559
+ 'random',
560
+ 'string',
561
+ 'collections',
562
+ 'itertools',
563
+ 'functools',
564
+ 'operator',
565
+ 'copy',
566
+ 'pickle',
567
+ 'base64',
568
+ 'hashlib',
569
+ 'urllib',
570
+ 'http',
571
+ 'email',
572
+ 'csv',
573
+ 'xml',
574
+ 'html',
575
+ 'logging',
576
+ 'threading',
577
+ 'multiprocessing',
578
+ 'subprocess',
579
+ 'socket',
580
+ 'ssl',
581
+ 'pathlib',
582
+ 'shutil',
583
+ 'tempfile',
584
+ 'io',
585
+ 'codecs',
586
+ 'unicodedata',
587
+ 'struct',
588
+ 'array',
589
+ 'queue',
590
+ 'heapq',
591
+ 'bisect',
592
+ 'weakref',
593
+ 'types',
594
+ 'inspect',
595
+ 'traceback',
596
+ 'warnings',
597
+ 'contextlib',
598
+ 'abc',
599
+ 'atexit',
600
+ 'gc',
601
+ 'locale',
602
+ 'gettext',
603
+ 'argparse',
604
+ 'configparser',
605
+ 'fileinput',
606
+ 'glob',
607
+ 'fnmatch',
608
+ 'linecache',
609
+ 'stat',
610
+ 'errno',
611
+ 'ctypes',
612
+ 'mmap',
613
+ 'select',
614
+ 'signal',
615
+ 'pwd',
616
+ 'grp',
617
+ 'termios',
618
+ 'tty',
619
+ 'pty',
620
+ 'fcntl',
621
+ 'resource',
622
+ 'syslog',
623
+ 'platform',
624
+ 'pipes',
625
+ 'sched',
626
+ 'asyncio',
627
+ 'concurrent',
628
+ 'dbm',
629
+ 'sqlite3',
630
+ 'zlib',
631
+ 'gzip',
632
+ 'bz2',
633
+ 'lzma',
634
+ 'zipfile',
635
+ 'tarfile',
636
+ 'shlex',
637
+ 'readline',
638
+ 'rlcompleter',
639
+ 'cmd',
640
+ 'doctest',
641
+ 'unittest',
642
+ 'pdb',
643
+ 'profile',
644
+ 'pstats',
645
+ 'timeit',
646
+ 'trace',
647
+ 'cgitb',
648
+ 'pydoc',
649
+ 'dis',
650
+ 'pickletools',
651
+ 'formatter',
652
+ 'msilib',
653
+ 'msvcrt',
654
+ 'nt',
655
+ 'ntpath',
656
+ 'nturl2path',
657
+ 'winreg',
658
+ 'winsound',
659
+ 'posix',
660
+ 'posixpath',
661
+ 'pwd',
662
+ 'spwd',
663
+ 'grp',
664
+ 'crypt',
665
+ 'termios',
666
+ 'tty',
667
+ 'pty',
668
+ 'fcntl',
669
+ 'pipes',
670
+ 'resource',
671
+ 'nis',
672
+ 'syslog',
673
+ 'optparse',
674
+ 'imp',
675
+ 'importlib',
676
+ 'keyword',
677
+ 'parser',
678
+ 'ast',
679
+ 'symtable',
680
+ 'symbol',
681
+ 'token',
682
+ 'tokenize',
683
+ 'tabnanny',
684
+ 'py_compile',
685
+ 'compileall',
686
+ 'pyclbr',
687
+ 'bdb',
688
+ 'pdb',
689
+ 'profile',
690
+ 'pstats',
691
+ 'timeit',
692
+ 'trace',
693
+ 'cgitb',
694
+ 'pydoc',
695
+ 'doctest',
696
+ 'unittest',
697
+ 'test',
698
+ 'lib2to3',
699
+ 'distutils',
700
+ 'ensurepip',
701
+ 'venv',
702
+ 'wsgiref',
703
+ 'html',
704
+ 'http',
705
+ 'urllib',
706
+ 'xmlrpc',
707
+ 'ipaddress',
708
+ 'secrets',
709
+ 'statistics',
710
+ 'pathlib',
711
+ 'enum',
712
+ 'numbers',
713
+ 'fractions',
714
+ 'decimal',
715
+ 'cmath',
716
+ 'array',
717
+ 'memoryview',
718
+ 'collections',
719
+ 'heapq',
720
+ 'bisect',
721
+ 'array',
722
+ 'weakref',
723
+ 'types',
724
+ 'copy',
725
+ 'pprint',
726
+ 'reprlib',
727
+ 'dataclasses',
728
+ 'dataclasses_json',
729
+ 'typing',
730
+ 'typing_extensions',
731
+ 'backports',
732
+ 'builtins',
733
+ '__builtin__',
734
+ '__future__',
735
+ ]);
736
+ // Filter out standard library, excluded packages, and already included packages
737
+ const importsToAdd = [...new Set([...taskImports]
738
+ .map(importName => resolvePypiPackageFromImport(importName)))]
739
+ .filter(packageName => {
740
+ if (!packageName)
741
+ return false;
742
+ // Skip standard library modules
743
+ if (standardLibraryModules.has(packageName)) {
744
+ return false;
745
+ }
746
+ // Skip excluded packages (provided by runtime or installed via other means)
747
+ if (EXCLUDED_PACKAGES.has(packageName)) {
748
+ return false;
749
+ }
750
+ // Skip if it's already in requirements (we'll check this by reading the file)
751
+ return true;
752
+ })
753
+ .filter(Boolean);
754
+ if (importsToAdd.length === 0) {
755
+ return;
756
+ }
757
+ // Read existing requirements to avoid duplicates
758
+ let existingRequirements = '';
759
+ try {
760
+ existingRequirements = await node_fs_1.promises.readFile(requirementsPath, 'utf8');
761
+ }
762
+ catch {
763
+ // File might not exist yet, that's okay
764
+ }
765
+ const existingPackages = new Set(existingRequirements
766
+ .split('\n')
767
+ .map(line => line.trim().split(/[!<=>]/)[0].toLowerCase())
768
+ .filter(Boolean));
769
+ // Add imports that aren't already in requirements.txt
770
+ const newPackages = importsToAdd
771
+ .filter(packageName => !existingPackages.has(packageName));
772
+ if (newPackages.length > 0) {
773
+ await node_fs_1.promises.appendFile(requirementsPath, newPackages.join('\n') + '\n');
774
+ console.log(`Added ${newPackages.length} extracted import(s) to requirements.txt`);
775
+ }
776
+ }
777
+ catch (error) {
778
+ console.error('Error adding extracted imports to requirements.txt:', error);
779
+ }
780
+ }
781
+ /**
782
+ * Appends forked plugin dependencies to requirements.txt.
783
+ * Returns the set of package names that were excluded but need runner.sh (e.g. ssdeep).
784
+ */
127
785
  static async generateRequirements(fromDirectory, toDirectory, packageName) {
786
+ const excludedRunner = new Set();
128
787
  const packageExtractedDir = (0, node_path_1.join)(fromDirectory, 'packages', packageName);
129
788
  const requirementsPath = (0, node_path_1.join)(toDirectory, 'requirements.txt');
130
789
  try {
@@ -132,7 +791,7 @@ class ConnectorGenerator {
132
791
  const whlFiles = files.filter(file => file.endsWith('.whl'));
133
792
  if (whlFiles.length === 0) {
134
793
  console.warn(`No .whl files found in ${packageExtractedDir}`);
135
- return;
794
+ return excludedRunner;
136
795
  }
137
796
  const dependencies = whlFiles
138
797
  .map(whlFile => {
@@ -145,14 +804,253 @@ class ConnectorGenerator {
145
804
  if (packageNameFromWhl === packageName) {
146
805
  return null;
147
806
  }
807
+ // Skip excluded packages (provided by runtime or installed via other means)
808
+ if (EXCLUDED_PACKAGES.has(packageNameFromWhl.toLowerCase())) {
809
+ if (RUNNER_EXCLUDED_PACKAGES.has(packageNameFromWhl.toLowerCase())) {
810
+ excludedRunner.add(packageNameFromWhl.toLowerCase());
811
+ }
812
+ return null;
813
+ }
148
814
  return `${packageNameFromWhl}==${packageVersion}`;
149
815
  })
150
816
  .filter(Boolean);
151
817
  await node_fs_1.promises.appendFile(requirementsPath, dependencies.join('\n') + '\n');
152
818
  console.log(`requirements.txt generated at: ${requirementsPath}`);
819
+ return excludedRunner;
153
820
  }
154
821
  catch (error) {
155
822
  console.error('Error generating requirements.txt:', error);
823
+ return excludedRunner;
824
+ }
825
+ }
826
+ /**
827
+ * Resolves version conflicts by taking the highest version requirement.
828
+ * Handles common cases like ==, >=, <=, >, < constraints.
829
+ */
830
+ static resolveVersionConflict(packageName, requirements) {
831
+ if (requirements.length === 1) {
832
+ return requirements[0];
833
+ }
834
+ const versionInfo = requirements.map(req => {
835
+ // Match version specifiers: ==2.0, >=2.0, <=2.0, >2.0, <2.0, ~=2.0
836
+ const match = req.match(/([!<=>~]+)\s*([\d.]+(?:[\dA-Za-z]*)?)/);
837
+ if (!match) {
838
+ return { req, operator: null, version: null, versionParts: null };
839
+ }
840
+ const operator = match[1];
841
+ const versionStr = match[2];
842
+ // Parse version into parts for comparison
843
+ const versionParts = versionStr.split('.').map(part => {
844
+ const numMatch = part.match(/^(\d+)/);
845
+ return numMatch ? Number.parseInt(numMatch[1], 10) : 0;
846
+ });
847
+ return { req, operator, version: versionStr, versionParts };
848
+ });
849
+ // Filter out requirements without version info
850
+ const withVersions = versionInfo.filter((v) => v.version !== null && v.operator !== null && v.versionParts !== null);
851
+ if (withVersions.length === 0) {
852
+ // No version info, return first
853
+ console.warn(`Multiple requirements for ${packageName} without version info: ${requirements.join(', ')}. Using: ${requirements[0]}`);
854
+ return requirements[0];
855
+ }
856
+ // Find the highest version
857
+ let highest = withVersions[0];
858
+ for (let i = 1; i < withVersions.length; i++) {
859
+ const current = withVersions[i];
860
+ // Compare versions
861
+ const comparison = this.compareVersions(current.versionParts, highest.versionParts);
862
+ if (comparison > 0) {
863
+ // Current is higher
864
+ highest = current;
865
+ }
866
+ else if (comparison === 0) {
867
+ // Same version, prefer == over >= or other operators
868
+ if (current.operator === '==' && highest.operator !== '==') {
869
+ highest = current;
870
+ }
871
+ else if (current.operator === '>=' && highest.operator === '>=') {
872
+ // Both are >= with same version, keep current (they're equivalent)
873
+ // No change needed
874
+ }
875
+ }
876
+ }
877
+ // Log if we're resolving a conflict
878
+ if (withVersions.length > 1) {
879
+ const allReqs = requirements.join(', ');
880
+ console.log(`Resolved version conflict for ${packageName}: ${allReqs} -> ${highest.req}`);
881
+ }
882
+ return highest.req;
883
+ }
884
+ /**
885
+ * Compares two version arrays (e.g., [2, 5, 0] vs [2, 4, 1]).
886
+ * Returns: positive if v1 > v2, negative if v1 < v2, 0 if equal.
887
+ */
888
+ static compareVersions(v1, v2) {
889
+ const maxLength = Math.max(v1.length, v2.length);
890
+ for (let i = 0; i < maxLength; i++) {
891
+ const part1 = v1[i] || 0;
892
+ const part2 = v2[i] || 0;
893
+ if (part1 > part2)
894
+ return 1;
895
+ if (part1 < part2)
896
+ return -1;
897
+ }
898
+ return 0;
899
+ }
900
+ /**
901
+ * Deduplicates requirements.txt and resolves version conflicts.
902
+ * When multiple versions of the same package are specified, keeps the highest version.
903
+ */
904
+ static async deduplicateRequirements(toDirectory) {
905
+ const requirementsPath = (0, node_path_1.join)(toDirectory, 'requirements.txt');
906
+ try {
907
+ let content = '';
908
+ try {
909
+ content = await node_fs_1.promises.readFile(requirementsPath, 'utf8');
910
+ }
911
+ catch {
912
+ // File might not exist, nothing to deduplicate
913
+ return;
914
+ }
915
+ const lines = content
916
+ .split('\n')
917
+ .map(line => line.trim())
918
+ .filter(line => line && !line.startsWith('#'));
919
+ if (lines.length === 0) {
920
+ return;
921
+ }
922
+ // Parse requirements into a map: packageName -> requirements[]
923
+ const requirementsMap = new Map();
924
+ for (const line of lines) {
925
+ // Extract package name (everything before version specifiers like ==, >=, <=, >, <, !=)
926
+ // Handle packages with extras like requests[security]>=2.0
927
+ const packageMatch = line.match(/^([\w[\]-]+?)(?:\s*[!<=>]|$)/);
928
+ if (!packageMatch)
929
+ continue;
930
+ // Extract base package name (remove extras like [security])
931
+ const fullPackageName = packageMatch[1];
932
+ const basePackageName = fullPackageName.split('[')[0].toLowerCase();
933
+ const requirement = line.trim();
934
+ if (!requirementsMap.has(basePackageName)) {
935
+ requirementsMap.set(basePackageName, []);
936
+ }
937
+ requirementsMap.get(basePackageName).push(requirement);
938
+ }
939
+ // Deduplicate and resolve conflicts
940
+ const deduplicated = [];
941
+ const strippedPackages = [];
942
+ for (const [packageName, reqs] of requirementsMap.entries()) {
943
+ // Skip excluded packages (provided by runtime or installed via other means)
944
+ if (EXCLUDED_PACKAGES.has(packageName)) {
945
+ continue;
946
+ }
947
+ // Strip version for packages known to have Python 3.11+ compatibility issues
948
+ if (PACKAGES_TO_STRIP_VERSION.has(packageName)) {
949
+ // Extract the package name with extras (e.g., requests[security] -> requests[security])
950
+ const reqWithExtras = reqs[0].match(/^([\w-]+(?:\[[^\]]+])?)/);
951
+ const packageWithExtras = reqWithExtras ? reqWithExtras[1] : packageName;
952
+ deduplicated.push(packageWithExtras);
953
+ strippedPackages.push(packageName);
954
+ continue;
955
+ }
956
+ if (reqs.length === 1) {
957
+ // Single requirement, use it as-is
958
+ deduplicated.push(reqs[0]);
959
+ }
960
+ else {
961
+ // Multiple requirements for same package - need to resolve
962
+ const uniqueReqs = [...new Set(reqs)];
963
+ if (uniqueReqs.length === 1) {
964
+ // All are identical, use one
965
+ deduplicated.push(uniqueReqs[0]);
966
+ }
967
+ else {
968
+ // Different versions/constraints - resolve by taking the higher version
969
+ const resolved = this.resolveVersionConflict(packageName, uniqueReqs);
970
+ deduplicated.push(resolved);
971
+ }
972
+ }
973
+ }
974
+ // Log stripped packages
975
+ if (strippedPackages.length > 0) {
976
+ console.log(`Stripped version constraints for Python 3.11+ compatibility: ${strippedPackages.join(', ')}`);
977
+ }
978
+ // Ensure datetime_parser is set to version 1.1.0 if present
979
+ const datetimeParserIndex = deduplicated.findIndex(req => {
980
+ const packageName = req.trim().split(/[!<=>]/)[0].toLowerCase();
981
+ return packageName === 'datetime_parser';
982
+ });
983
+ if (datetimeParserIndex !== -1) {
984
+ deduplicated[datetimeParserIndex] = 'datetime_parser==1.1.0';
985
+ }
986
+ // Ensure pyflattener is set to version 1.1.0 if present
987
+ const pyflattenerIndex = deduplicated.findIndex(req => {
988
+ const packageName = req.trim().split(/[!<=>]/)[0].toLowerCase();
989
+ return packageName === 'pyflattener';
990
+ });
991
+ if (pyflattenerIndex !== -1) {
992
+ deduplicated[pyflattenerIndex] = 'pyflattener==1.1.0';
993
+ }
994
+ // Sort alphabetically for consistency
995
+ deduplicated.sort();
996
+ // Write back the deduplicated requirements
997
+ const deduplicatedContent = deduplicated.join('\n') + '\n';
998
+ await node_fs_1.promises.writeFile(requirementsPath, deduplicatedContent, 'utf8');
999
+ const removedCount = lines.length - deduplicated.length;
1000
+ if (removedCount > 0) {
1001
+ console.log(`Deduplicated requirements.txt: removed ${removedCount} duplicate(s)`);
1002
+ }
1003
+ }
1004
+ catch (error) {
1005
+ console.error('Error deduplicating requirements.txt:', error);
1006
+ }
1007
+ }
1008
+ /**
1009
+ * Creates compile.sh in the connector root if swimbundle_utils was found in requirements.
1010
+ * This handles the special case where swimbundle_utils needs to be installed separately.
1011
+ */
1012
+ static async generateCompileScript(toDirectory) {
1013
+ const compileScriptPath = (0, node_path_1.join)(toDirectory, 'compile.sh');
1014
+ try {
1015
+ // Always create compile.sh (it's safe to have even if swimbundle_utils wasn't in requirements)
1016
+ const compileScriptContent = 'pip install --user swimbundle_utils==4.8.0 dominions --no-deps\n';
1017
+ await node_fs_1.promises.writeFile(compileScriptPath, compileScriptContent, 'utf8');
1018
+ // Make it executable
1019
+ await node_fs_1.promises.chmod(compileScriptPath, 0o755);
1020
+ }
1021
+ catch (error) {
1022
+ console.error('Error generating compile.sh:', error);
1023
+ }
1024
+ }
1025
+ /**
1026
+ * Creates runner.sh in the connector root with apt/pip installs for optional system deps:
1027
+ * - wkhtmltopdf when connector uses sw_swimlane_email
1028
+ * - ssdeep (apt + pip install --user) when needsSsdeep (e.g. from task imports; ssdeep is excluded from requirements.txt)
1029
+ */
1030
+ static async generateRunnerScript(toDirectory, usesSwimlaneEmail, needsSsdeep) {
1031
+ const runnerPath = (0, node_path_1.join)(toDirectory, 'runner.sh');
1032
+ const parts = [];
1033
+ if (usesSwimlaneEmail || needsSsdeep) {
1034
+ parts.push('apt update -y\n');
1035
+ const installs = [];
1036
+ if (usesSwimlaneEmail)
1037
+ installs.push('wkhtmltopdf');
1038
+ if (needsSsdeep)
1039
+ installs.push('ssdeep', 'libfuzzy-dev', 'gcc');
1040
+ if (installs.length > 0)
1041
+ parts.push(`apt install -y ${installs.join(' ')}\n`);
1042
+ }
1043
+ if (needsSsdeep)
1044
+ parts.push('pip install ssdeep --user\n');
1045
+ if (parts.length === 0)
1046
+ return;
1047
+ try {
1048
+ const scriptContent = parts.join('');
1049
+ await node_fs_1.promises.writeFile(runnerPath, scriptContent, 'utf8');
1050
+ await node_fs_1.promises.chmod(runnerPath, 0o755);
1051
+ }
1052
+ catch (error) {
1053
+ console.error('Error generating runner.sh:', error);
156
1054
  }
157
1055
  }
158
1056
  static async createBaseCode(fromDirectory, toDirectory, packageName) {
@@ -173,17 +1071,67 @@ class ConnectorGenerator {
173
1071
  console.error(`Could not find base package folder inside: ${whlPath}`);
174
1072
  return;
175
1073
  }
176
- const destinationPath = (0, node_path_1.join)(toDirectory, 'connector', 'src', packageName);
1074
+ const destinationPath = (0, node_path_1.join)(toDirectory, 'connector', packageName);
177
1075
  await node_fs_1.promises.mkdir(destinationPath, { recursive: true });
178
1076
  const baseCodePath = (0, node_path_1.join)(tempExtractDir, basePackageDir);
179
1077
  const baseFiles = await node_fs_1.promises.readdir(baseCodePath);
180
- await Promise.all(baseFiles.map(file => node_fs_1.promises.rename((0, node_path_1.join)(baseCodePath, file), (0, node_path_1.join)(destinationPath, file))));
181
- console.log(`Base code for ${packageName} moved successfully.`);
1078
+ await Promise.all(baseFiles.map(async (file) => {
1079
+ const sourcePath = (0, node_path_1.join)(baseCodePath, file);
1080
+ const destPath = (0, node_path_1.join)(destinationPath, file);
1081
+ const stat = await node_fs_1.promises.stat(sourcePath);
1082
+ // Copy directory recursively or copy file
1083
+ await (stat.isDirectory() ?
1084
+ this.copyDirectoryRecursive(sourcePath, destPath) :
1085
+ (0, promises_1.copyFile)(sourcePath, destPath));
1086
+ }));
1087
+ if (packageName === 'sw_swimlane_email') {
1088
+ await this.patchSwimlaneEmailInit((0, node_path_1.join)(destinationPath, '__init__.py'));
1089
+ }
1090
+ console.log(`Base code for ${packageName} copied successfully.`);
1091
+ }
1092
+ catch (error) {
1093
+ console.error(`Error extracting and copying base code for ${packageName}:`, error);
1094
+ }
1095
+ }
1096
/**
 * Patch sw_swimlane_email __init__.py execute() to use system wkhtmltoimage and Config instead of pkg_resources/chmod.
 * Matches the block after "def execute(self):" containing:
 *   f = pkg_resources.resource_filename(__name__, "__packages/wkhtmltoimage") # ...
 *   c = Config(f)
 *   st = os.stat(f)
 *   os.chmod(f, st.st_mode | stat.S_IEXEC) # ...
 * If the pattern is not found, the file is left untouched (replace() returns the
 * original string and the write below is skipped).
 * @param initPath - Path to the extracted package's __init__.py.
 */
static async patchSwimlaneEmailInit(initPath) {
    try {
        // Throws into the catch below when the file does not exist.
        await node_fs_1.promises.access(initPath);
        const content = await node_fs_1.promises.readFile(initPath, 'utf8');
        // Captures the indented `def execute(self):` header in $1, followed by the
        // four pkg_resources/Config/stat/chmod lines that are to be replaced.
        const oldPattern = /(\s+def execute\(self\):)\s*\n\s+f = pkg_resources\.resource_filename\(__name__, ["']__packages\/wkhtmltoimage["']\)[^\n]*\n\s+c = Config\(f\)\s*\n\s+st = os\.stat\(f\)\s*\n\s+os\.chmod\(f, st\.st_mode \| stat\.S_IEXEC\)[^\n]*/m;
        // $1 re-emits the captured def header; the following lines are injected
        // Python source, so their leading whitespace is significant.
        // NOTE(review): the indentation inside this template literal must match the
        // patched file's method-body indentation — verify against the shipped package.
        const newBody = `$1
        path_wkhtmltoimage = '/usr/bin/wkhtmltoimage'
        c = Config(wkhtmltoimage=path_wkhtmltoimage)`;
        const newContent = content.replace(oldPattern, newBody);
        // Only rewrite the file when the pattern actually matched.
        if (newContent !== content) {
            await node_fs_1.promises.writeFile(initPath, newContent, 'utf8');
            console.log('Patched sw_swimlane_email __init__.py execute() for wkhtmltoimage/Config');
        }
    }
    catch (error) {
        console.error(`Error patching sw_swimlane_email __init__.py: ${initPath}`, error);
    }
}
1122
+ static async copyDirectoryRecursive(sourceDir, destDir) {
1123
+ await node_fs_1.promises.mkdir(destDir, { recursive: true });
1124
+ const entries = await node_fs_1.promises.readdir(sourceDir, { withFileTypes: true });
1125
+ await Promise.all(entries.map(async (entry) => {
1126
+ // Skip __pycache__ directories
1127
+ if (entry.name === '__pycache__') {
1128
+ return;
1129
+ }
1130
+ const sourcePath = (0, node_path_1.join)(sourceDir, entry.name);
1131
+ const destPath = (0, node_path_1.join)(destDir, entry.name);
1132
+ await (entry.isDirectory() ? this.copyDirectoryRecursive(sourcePath, destPath) : (0, promises_1.copyFile)(sourcePath, destPath));
1133
+ }));
1134
+ }
187
1135
  static async getBaseCodePath(fromDirectory, packageName) {
188
1136
  const packageDir = (0, node_path_1.join)(fromDirectory, 'packages', packageName);
189
1137
  try {
@@ -200,23 +1148,111 @@ class ConnectorGenerator {
200
1148
  return null;
201
1149
  }
202
1150
  }
203
- static async getActionContentFork(script) {
1151
/**
 * Documents `buildInputsMergeSnippet` (defined below, after `defaultPyValue`):
 * builds a Python snippet that rebuilds inputs from the YAML-defined keys (empty value per type),
 * then merges the asset values and finally the runtime inputs on top.
 * Only includes keys where the task InputMapping has an associated Type and Value.
 * The placeholder # INPUTS_MERGE_HERE in the templates is replaced with this snippet.
 * @param includeAttachmentBlock - when true (script_override only), include attachment conversion and dict handling; when false (plugin_override), only temp_inputs merge.
 */
1157
+ static defaultPyValue(valueType) {
1158
+ switch (valueType) {
1159
+ case 'number':
1160
+ case 'integer': {
1161
+ return 'None';
1162
+ }
1163
+ case 'boolean': {
1164
+ return 'False';
1165
+ }
1166
+ case 'array': {
1167
+ return '[]';
1168
+ }
1169
+ default: {
1170
+ return "''";
1171
+ }
1172
+ }
1173
+ }
1174
/**
 * Builds the Python source injected at the # INPUTS_MERGE_HERE placeholder of the
 * migrator-runner templates: seeds `temp_inputs` with an empty default per YAML key,
 * optionally merges the asset, converts attachment inputs, then overlays the runtime
 * inputs so that provided values win over the seeded defaults.
 * Only keys whose InputMapping has both a Type and a Value are seeded.
 * NOTE(review): the attachment block is currently appended for BOTH template kinds —
 * the early return for includeAttachmentBlock === false is commented out below, so the
 * flag now only controls whether `temp_inputs.update(self.asset)` is emitted. Confirm
 * this is intentional before relying on the contract described in the comment above
 * this method.
 * @param inputs - Task InputMapping entries (Key/Type/Value/ValueType).
 * @param includeAttachmentBlock - true for script_override, false for plugin_override.
 * @returns Python fragment; first line carries no indent (template supplies 8 spaces).
 */
static buildInputsMergeSnippet(inputs, includeAttachmentBlock) {
    // Only seed defaults for fully specified mappings (Type AND Value present).
    const withTypeAndValue = inputs.filter(inp => inp.Type !== null && inp.Type !== undefined &&
        inp.Value !== null && inp.Value !== undefined);
    const entries = [];
    for (const inp of withTypeAndValue) {
        // JSON.stringify(Key) doubles as a valid Python string literal for the key.
        entries.push(`${JSON.stringify(inp.Key)}: ${this.defaultPyValue(inp.ValueType)}`);
    }
    const dictLiteral = entries.length > 0 ? `{${entries.join(', ')}}` : '{}';
    // Indentation units for the generated Python.
    // NOTE(review): exact space widths of i/i2/i3/i4 were lost in this rendering —
    // verify against the template files' expected indentation before editing.
    const i = '        ';
    // First line has no leading indent: template already has 8 spaces before # INPUTS_MERGE_HERE
    const baseMerge = includeAttachmentBlock ?
        `temp_inputs = ${dictLiteral}\n${i}temp_inputs.update(self.asset)\n` :
        `temp_inputs = ${dictLiteral}\n`;
    // if (!includeAttachmentBlock) {
    //     return `${baseMerge}${i}temp_inputs.update(inputs)\n${i}inputs = temp_inputs`
    // }
    const i2 = '            ';
    const i3 = '                ';
    const i4 = '                    ';
    // Python that converts list-typed file inputs to {filename, base64} dicts and
    // normalizes {id, name} reference dicts before merging into temp_inputs.
    const attachmentBlock = [
        `${i}attachments = {}`,
        `${i}files = inputs.pop('files', [])`,
        `${i}import base64`,
        `${i}def find_and_convert_attachments(filename, files):`,
        `${i2}for file in files:`,
        `${i3}if isinstance(file, (list, tuple)):`,
        `${i3}    current_filename, file_obj = file`,
        `${i3}    if current_filename == filename:`,
        `${i3}        files.remove(file)`,
        `${i3}        return {'filename': current_filename, 'base64': base64.b64encode(file_obj.read()).decode()}`,
        `${i}for field in inputs:`,
        `${i2}if type(inputs[field]) is list:`,
        `${i3}updated_value = [find_and_convert_attachments(i['file_name'], files) for i in inputs[field] if 'file' in i and 'file_name' in i]`,
        `${i3}if updated_value:`,
        `${i4}attachments[field] = updated_value`,
        `${i2}if type(inputs[field]) is dict:`,
        `${i3}# this is actually a conversion for user inputs, but we'll take the oppt to fix it here`,
        `${i3}if 'id' in inputs[field] and 'name' in inputs[field]:`,
        `${i4}attachments[field] = inputs[field]['name']`,
        `${i}inputs.update(attachments)`,
        '',
        `${i}temp_inputs.update(inputs)`,
        `${i}inputs = temp_inputs`,
    ].join('\n');
    return `${baseMerge}\n${attachmentBlock}`;
}
1220
+ static replaceTaskExecuteRequestCall(content) {
1221
+ return content.replaceAll(this.TASK_EXECUTE_REQUEST_CALL, this.TASK_EXECUTE_WEBHOOK_CALL);
1222
+ }
1223
+ /** Build Python dict literal for OUTPUT_DATE_CONVERSIONS (key -> timetype/format). */
1224
+ static buildOutputDateConversionsDict(conversions) {
1225
+ if (!conversions?.length)
1226
+ return '{}';
1227
+ const obj = {};
1228
+ for (const { key, timetype } of conversions)
1229
+ obj[key] = timetype;
1230
+ return JSON.stringify(obj);
1231
+ }
1232
+ static async getActionContentFork(script, inputs, outputDateConversions) {
204
1233
  try {
205
- const templateContent = await node_fs_1.promises.readFile((0, node_path_1.join)(__dirname, '../templates/migrator-runners/plugin_override.txt'), 'utf8');
1234
+ let templateContent = await node_fs_1.promises.readFile((0, node_path_1.join)(__dirname, '../templates/migrator-runners/plugin_override.txt'), 'utf8');
206
1235
  // Remove any carriage returns to avoid CRLF issues
207
1236
  const scriptNoCR = script.replaceAll('\r', '');
208
- return templateContent.replace('# HERE', scriptNoCR);
1237
+ const inputsMergeSnippet = this.buildInputsMergeSnippet(inputs, false);
1238
+ templateContent = templateContent.replace(this.INPUTS_MERGE_PLACEHOLDER, inputsMergeSnippet);
1239
+ templateContent = templateContent.replace(this.OUTPUT_DATE_CONVERSIONS_PLACEHOLDER, this.buildOutputDateConversionsDict(outputDateConversions));
1240
+ // Use function replacer so `$` sequences in Python code stay literal.
1241
+ return templateContent.replace('# HERE', () => scriptNoCR);
209
1242
  }
210
1243
  catch (error) {
211
1244
  console.error('Failed to load action forked template', error);
212
1245
  return `Error during forked plugin generation: ${error}`;
213
1246
  }
214
1247
  }
215
- static async getActionContentScript(script) {
1248
+ static async getActionContentScript(script, inputs, outputDateConversions) {
216
1249
  try {
217
- const templateContent = await node_fs_1.promises.readFile((0, node_path_1.join)(__dirname, '../templates/migrator-runners/script_override.txt'), 'utf8');
1250
+ let templateContent = await node_fs_1.promises.readFile((0, node_path_1.join)(__dirname, '../templates/migrator-runners/script_override.txt'), 'utf8');
218
1251
  // Remove any carriage returns to avoid CRLF issues
219
1252
  const scriptNoCR = script.replaceAll('\r', '');
1253
+ const inputsMergeSnippet = this.buildInputsMergeSnippet(inputs, true);
1254
+ templateContent = templateContent.replace(this.INPUTS_MERGE_PLACEHOLDER, inputsMergeSnippet);
1255
+ templateContent = templateContent.replace(this.OUTPUT_DATE_CONVERSIONS_PLACEHOLDER, this.buildOutputDateConversionsDict(outputDateConversions));
220
1256
  const lines = scriptNoCR.split('\n');
221
1257
  if (lines.length === 0) {
222
1258
  return templateContent.replace('# HERE', '');
@@ -228,7 +1264,8 @@ class ConnectorGenerator {
228
1264
  const combinedScript = subsequentIndented ?
229
1265
  `${firstLine}\n${subsequentIndented}` :
230
1266
  firstLine;
231
- return templateContent.replace('# HERE', combinedScript);
1267
+ // Use function replacer so `$` sequences in Python code stay literal.
1268
+ return templateContent.replace('# HERE', () => combinedScript);
232
1269
  }
233
1270
  catch (error) {
234
1271
  console.error('Failed to load action script template', error);
@@ -238,20 +1275,59 @@ class ConnectorGenerator {
238
1275
  static async generateActionConfig(transformationResult, toDirectory) {
239
1276
  const exportUid = transformationResult.exportUid;
240
1277
  const outputPath = (0, node_path_1.join)(toDirectory, 'connector', 'config', 'actions', `${exportUid}.yaml`);
1278
+ // Format description with task ID prepended if available
1279
+ const description = transformationResult.taskId ?
1280
+ `${transformationResult.taskId} - ${transformationResult.description || ''}` :
1281
+ transformationResult.description || '';
1282
+ const truncatedDescription = this.truncateActionDescription(description);
241
1283
  const yamlData = {
242
1284
  schema: 'action/1',
243
1285
  title: transformationResult.exportName,
244
1286
  name: transformationResult.exportUid,
245
- description: transformationResult.description,
1287
+ description: truncatedDescription,
246
1288
  inputs: {
247
1289
  type: 'object',
248
- properties: {},
1290
+ properties: {
1291
+ ApplicationId: {
1292
+ title: 'Application ID',
1293
+ type: 'string',
1294
+ },
1295
+ RecordId: {
1296
+ title: 'Record ID',
1297
+ type: 'string',
1298
+ },
1299
+ SwimlaneUrl: {
1300
+ title: 'Swimlane URL',
1301
+ type: 'string',
1302
+ },
1303
+ TurbineAccountId: {
1304
+ title: 'Turbine Account ID',
1305
+ type: 'string',
1306
+ },
1307
+ TurbineTenantId: {
1308
+ title: 'Turbine Tenant ID',
1309
+ type: 'string',
1310
+ },
1311
+ ExecuteTaskWebhookUrl: {
1312
+ title: 'Execute Task Webhook URL',
1313
+ type: 'string',
1314
+ },
1315
+ },
249
1316
  required: [],
250
1317
  additionalProperties: true,
251
1318
  },
252
1319
  output: {
253
1320
  type: 'object',
254
- properties: {},
1321
+ properties: {
1322
+ output: {
1323
+ title: 'Output',
1324
+ type: 'array',
1325
+ items: {
1326
+ type: 'object',
1327
+ properties: {},
1328
+ },
1329
+ },
1330
+ },
255
1331
  required: [],
256
1332
  additionalProperties: true,
257
1333
  },
@@ -260,39 +1336,141 @@ class ConnectorGenerator {
260
1336
  method: '',
261
1337
  },
262
1338
  };
1339
+ // Only add regular inputs (exclude credentials and asset types which go to asset.yaml)
263
1340
  for (const input of transformationResult.inputs) {
264
- yamlData.inputs.properties[input.Key] = {
1341
+ const inputType = input.ValueType || 'string';
1342
+ const inputProperty = {
265
1343
  title: input.Title || input.Key,
266
- type: input.ValueType || 'string',
267
- examples: input.Example,
1344
+ type: inputType,
268
1345
  };
1346
+ if (inputType === 'array') {
1347
+ const arrayItemType = input.arrayItemType;
1348
+ const arrayItemValueType = input.arrayItemValueType;
1349
+ switch (arrayItemType) {
1350
+ case 'attachment': {
1351
+ inputProperty.items = {
1352
+ contentDisposition: 'attachment',
1353
+ type: 'object',
1354
+ additionalProperties: false,
1355
+ properties: {
1356
+ file: {
1357
+ type: 'string',
1358
+ format: 'binary',
1359
+ },
1360
+ // eslint-disable-next-line camelcase
1361
+ file_name: {
1362
+ type: 'string',
1363
+ },
1364
+ },
1365
+ };
1366
+ break;
1367
+ }
1368
+ case 'reference': {
1369
+ inputProperty.items = {
1370
+ type: 'object',
1371
+ required: [],
1372
+ };
1373
+ break;
1374
+ }
1375
+ case 'list': {
1376
+ inputProperty.items = {
1377
+ type: arrayItemValueType === 'numeric' ? 'number' : 'string',
1378
+ };
1379
+ break;
1380
+ }
1381
+ default: {
1382
+ // valueslist (multi-select, check list) or default: string items
1383
+ inputProperty.items = {
1384
+ type: 'string',
1385
+ };
1386
+ }
1387
+ }
1388
+ }
1389
+ if (input.Example !== undefined && input.Example !== null) {
1390
+ inputProperty.examples = Array.isArray(input.Example) ? input.Example : [input.Example];
1391
+ }
1392
+ yamlData.inputs.properties[input.Key] = inputProperty;
269
1393
  if (input.Creds) {
270
1394
  yamlData.inputs.properties[input.Key].format = 'password';
271
1395
  }
272
1396
  }
273
1397
  for (const output of transformationResult.outputs) {
274
- yamlData.output.properties[output.Key] = {
1398
+ const outputProperty = {
275
1399
  title: output.Title,
276
1400
  type: output.ValueType,
277
- examples: output.Example,
278
1401
  };
1402
+ if (output.Example !== undefined && output.Example !== null) {
1403
+ outputProperty.examples = Array.isArray(output.Example) ? output.Example : [output.Example];
1404
+ }
1405
+ // Ensure the key is valid and doesn't cause YAML issues
1406
+ const safeKey = output.Key || 'unnamed_output';
1407
+ yamlData.output.properties.output.items.properties[safeKey] = outputProperty;
1408
+ }
1409
+ try {
1410
+ // Ensure the output properties object is properly initialized
1411
+ if (!yamlData.output.properties.output.items.properties) {
1412
+ yamlData.output.properties.output.items.properties = {};
1413
+ }
1414
+ const yamlString = js_yaml_1.default.dump(yamlData, { indent: 2, noRefs: true, lineWidth: -1 });
1415
+ // Validate the YAML can be parsed back (catches structural issues)
1416
+ let parsed;
1417
+ try {
1418
+ parsed = js_yaml_1.default.load(yamlString);
1419
+ }
1420
+ catch (parseError) {
1421
+ console.error(`Generated invalid YAML for ${exportUid}:`, parseError);
1422
+ console.error('YAML content:', yamlString.slice(0, 500));
1423
+ throw new Error(`Failed to generate valid YAML for action ${exportUid}: ${parseError}`);
1424
+ }
1425
+ // Double-check the structure is correct
1426
+ if (!parsed?.output?.properties?.output?.items?.properties) {
1427
+ console.error(`YAML structure issue for ${exportUid}: output properties not properly nested`);
1428
+ console.error('Parsed structure:', JSON.stringify(parsed?.output, null, 2));
1429
+ }
1430
+ // Write atomically using a temporary file to prevent corruption
1431
+ const dir = (0, node_path_1.join)(outputPath, '..');
1432
+ await node_fs_1.promises.mkdir(dir, { recursive: true });
1433
+ // Write to a temporary file first, then rename (atomic operation)
1434
+ const tempFile = (0, node_path_1.join)((0, node_os_1.tmpdir)(), `${exportUid}-${Date.now()}-${Math.random().toString(36).slice(7)}.yaml`);
1435
+ try {
1436
+ await node_fs_1.promises.writeFile(tempFile, yamlString, 'utf8');
1437
+ // Atomically move the temp file to the final location
1438
+ await node_fs_1.promises.rename(tempFile, outputPath);
1439
+ }
1440
+ catch (writeError) {
1441
+ // Clean up temp file if it exists
1442
+ await node_fs_1.promises.unlink(tempFile).catch(() => { });
1443
+ throw writeError;
1444
+ }
1445
+ return true;
1446
+ }
1447
+ catch (error) {
1448
+ console.error(`Error generating action config for ${exportUid}:`, error);
1449
+ throw error;
279
1450
  }
280
- const yamlString = js_yaml_1.default.dump(yamlData, { indent: 2 });
281
- await this.createFile(outputPath, yamlString);
282
- return true;
283
1451
  }
284
1452
  static async createFile(dir, data) {
285
- await node_fs_1.promises.writeFile(dir, data);
1453
+ await node_fs_1.promises.writeFile(dir, data, 'utf8');
286
1454
  }
287
1455
  static async generateConnectorManifest(connectorConfig, group, toDir) {
288
- const connectorNameUid = `${connectorConfig.author || connectorConfig.vendor}_${group.connectorName}`
289
- .replaceAll(/[^\w -]/g, '')
290
- .replaceAll(/\s+/g, '_')
1456
+ let connectorNameUid = `${connectorConfig.author || connectorConfig.vendor}_${group.connectorName}`
1457
+ .replaceAll(/\W/g, '_') // Replace all non-word characters (including dashes, spaces, colons, etc.) with underscores
1458
+ .replaceAll(/_+/g, '_') // Replace multiple consecutive underscores with a single underscore
1459
+ .replaceAll(/^_|_$/g, '') // Remove leading and trailing underscores
291
1460
  .toLowerCase();
1461
+ // Truncate connector name to 50 characters
1462
+ if (connectorNameUid.length > 50) {
1463
+ connectorNameUid = connectorNameUid.slice(0, 50);
1464
+ }
1465
+ // Truncate title to 50 characters
1466
+ let connectorTitle = group.connectorName;
1467
+ if (connectorTitle.length > 50) {
1468
+ connectorTitle = connectorTitle.slice(0, 50);
1469
+ }
292
1470
  const data = {
293
1471
  author: connectorConfig.author || connectorConfig.vendor,
294
1472
  bugs: '',
295
- description: connectorConfig.description,
1473
+ description: connectorConfig.description || group.connectorName,
296
1474
  homepage: connectorConfig.homepage,
297
1475
  iconImage: '../image/logo.png',
298
1476
  keywords: ['Custom', 'User created'],
@@ -308,10 +1486,10 @@ class ConnectorGenerator {
308
1486
  },
309
1487
  },
310
1488
  name: connectorNameUid,
311
- product: connectorConfig.product || 'Unknown Product',
1489
+ product: connectorConfig.product || group.connectorName,
312
1490
  repository: `https://github.com/swimlane-prosrv/t_${connectorNameUid}`,
313
1491
  schema: 'connector/1',
314
- title: group.connectorName,
1492
+ title: connectorTitle,
315
1493
  vendor: connectorConfig.vendor || 'Unknown Vendor',
316
1494
  version: '1.0.0',
317
1495
  runConfig: {
@@ -324,4 +1502,9 @@ class ConnectorGenerator {
324
1502
  }
325
1503
  }
326
1504
  exports.ConnectorGenerator = ConnectorGenerator;
1505
// Hard cap applied to generated action descriptions — presumably enforced by
// truncateActionDescription (called in generateActionConfig); verify against that helper.
ConnectorGenerator.ACTION_DESCRIPTION_MAX_LENGTH = 254;
// Placeholder in the migrator-runner templates replaced by the generated
// inputs-merge Python snippet (see buildInputsMergeSnippet).
ConnectorGenerator.INPUTS_MERGE_PLACEHOLDER = '# INPUTS_MERGE_HERE';
// Placeholder in the templates replaced by the output date-conversions dict
// literal (see buildOutputDateConversionsDict).
ConnectorGenerator.OUTPUT_DATE_CONVERSIONS_PLACEHOLDER = '__OUTPUT_DATE_CONVERSIONS__';
// Legacy Swimlane REST call searched for in migrated Python scripts...
ConnectorGenerator.TASK_EXECUTE_REQUEST_CALL = "swimlane.request('post', 'task/execute/record', json=data)";
// ...and the webhook-based call it is rewritten to (see replaceTaskExecuteRequestCall).
ConnectorGenerator.TASK_EXECUTE_WEBHOOK_CALL = 'swimlane._session.post(swimlane._execute_task_webhook_url, json=data)';
327
1510
  //# sourceMappingURL=connector-generator.js.map