informatica-python 1.4.0__tar.gz → 1.4.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26)
  1. {informatica_python-1.4.0 → informatica_python-1.4.1}/PKG-INFO +1 -1
  2. {informatica_python-1.4.0 → informatica_python-1.4.1}/informatica_python/generators/mapping_gen.py +25 -11
  3. {informatica_python-1.4.0 → informatica_python-1.4.1}/informatica_python/generators/workflow_gen.py +4 -3
  4. {informatica_python-1.4.0 → informatica_python-1.4.1}/informatica_python.egg-info/PKG-INFO +1 -1
  5. {informatica_python-1.4.0 → informatica_python-1.4.1}/pyproject.toml +1 -1
  6. {informatica_python-1.4.0 → informatica_python-1.4.1}/README.md +0 -0
  7. {informatica_python-1.4.0 → informatica_python-1.4.1}/informatica_python/__init__.py +0 -0
  8. {informatica_python-1.4.0 → informatica_python-1.4.1}/informatica_python/cli.py +0 -0
  9. {informatica_python-1.4.0 → informatica_python-1.4.1}/informatica_python/converter.py +0 -0
  10. {informatica_python-1.4.0 → informatica_python-1.4.1}/informatica_python/generators/__init__.py +0 -0
  11. {informatica_python-1.4.0 → informatica_python-1.4.1}/informatica_python/generators/config_gen.py +0 -0
  12. {informatica_python-1.4.0 → informatica_python-1.4.1}/informatica_python/generators/error_log_gen.py +0 -0
  13. {informatica_python-1.4.0 → informatica_python-1.4.1}/informatica_python/generators/helper_gen.py +0 -0
  14. {informatica_python-1.4.0 → informatica_python-1.4.1}/informatica_python/generators/sql_gen.py +0 -0
  15. {informatica_python-1.4.0 → informatica_python-1.4.1}/informatica_python/models.py +0 -0
  16. {informatica_python-1.4.0 → informatica_python-1.4.1}/informatica_python/parser.py +0 -0
  17. {informatica_python-1.4.0 → informatica_python-1.4.1}/informatica_python/utils/__init__.py +0 -0
  18. {informatica_python-1.4.0 → informatica_python-1.4.1}/informatica_python/utils/datatype_map.py +0 -0
  19. {informatica_python-1.4.0 → informatica_python-1.4.1}/informatica_python/utils/expression_converter.py +0 -0
  20. {informatica_python-1.4.0 → informatica_python-1.4.1}/informatica_python.egg-info/SOURCES.txt +0 -0
  21. {informatica_python-1.4.0 → informatica_python-1.4.1}/informatica_python.egg-info/dependency_links.txt +0 -0
  22. {informatica_python-1.4.0 → informatica_python-1.4.1}/informatica_python.egg-info/entry_points.txt +0 -0
  23. {informatica_python-1.4.0 → informatica_python-1.4.1}/informatica_python.egg-info/requires.txt +0 -0
  24. {informatica_python-1.4.0 → informatica_python-1.4.1}/informatica_python.egg-info/top_level.txt +0 -0
  25. {informatica_python-1.4.0 → informatica_python-1.4.1}/setup.cfg +0 -0
  26. {informatica_python-1.4.0 → informatica_python-1.4.1}/tests/test_converter.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: informatica-python
3
- Version: 1.4.0
3
+ Version: 1.4.1
4
4
  Summary: Convert Informatica PowerCenter workflow XML to Python/PySpark code
5
5
  License-Expression: MIT
6
6
  Requires-Python: >=3.8
@@ -247,7 +247,7 @@ def generate_mapping_code(mapping: MappingDef, folder: FolderDef,
247
247
  lines.append(f" _src_path_{safe} = config.get('sources', {{}}).get('{src_def.name}', {{}}).get('file_path',")
248
248
  lines.append(f" os.path.join('{src_dir}', '{src_file}'))")
249
249
  if src_def.flatfile:
250
- _emit_flatfile_read(lines, safe, src_def)
250
+ _emit_flatfile_read(lines, safe, src_def, file_path_override=True)
251
251
  else:
252
252
  lines.append(f" df_{safe} = read_file(_src_path_{safe}, config.get('sources', {{}}).get('{src_def.name}', {{}}))")
253
253
  elif src_def.database_type and src_def.database_type != "Flat File":
@@ -323,15 +323,16 @@ def _flatfile_config_dict(ff):
323
323
  return cfg
324
324
 
325
325
 
326
- def _emit_flatfile_read(lines, var_name, src_def, indent=" "):
326
+ def _emit_flatfile_read(lines, var_name, src_def, indent=" ", file_path_override=None):
327
327
  ff = src_def.flatfile
328
328
  fc = _flatfile_config_dict(ff)
329
+ default_path = f"_src_path_{var_name}" if file_path_override else f"config.get('sources', {{}}).get('{src_def.name}', {{}}).get('file_path', '{src_def.name}')"
329
330
  if fc.get("fixed_width"):
330
331
  widths = []
331
332
  for fld in src_def.fields:
332
333
  widths.append(fld.precision if fld.precision else 10)
333
334
  lines.append(f"{indent}df_{var_name} = pd.read_fwf(")
334
- lines.append(f"{indent} config.get('sources', {{}}).get('{src_def.name}', {{}}).get('file_path', '{src_def.name}'),")
335
+ lines.append(f"{indent} {default_path},")
335
336
  lines.append(f"{indent} widths={widths},")
336
337
  hdr = fc.get("header_lines", 0)
337
338
  if hdr:
@@ -367,15 +368,22 @@ def _emit_flatfile_read(lines, var_name, src_def, indent=" "):
367
368
  if file_cfg:
368
369
  lines.append(f"{indent}ff_cfg_{var_name} = {repr(file_cfg)}")
369
370
  lines.append(f"{indent}ff_cfg_{var_name}.update(config.get('sources', {{}}).get('{src_def.name}', {{}}))")
370
- lines.append(f"{indent}df_{var_name} = read_file(ff_cfg_{var_name}.get('file_path', '{src_def.name}'), ff_cfg_{var_name})")
371
+ if file_path_override:
372
+ lines.append(f"{indent}df_{var_name} = read_file({default_path}, ff_cfg_{var_name})")
373
+ else:
374
+ lines.append(f"{indent}df_{var_name} = read_file(ff_cfg_{var_name}.get('file_path', '{src_def.name}'), ff_cfg_{var_name})")
371
375
  else:
372
- lines.append(f"{indent}df_{var_name} = read_file(config.get('sources', {{}}).get('{src_def.name}', {{}}).get('file_path', '{src_def.name}'),")
373
- lines.append(f"{indent} config.get('sources', {{}}).get('{src_def.name}', {{}}))")
376
+ if file_path_override:
377
+ lines.append(f"{indent}df_{var_name} = read_file({default_path}, config.get('sources', {{}}).get('{src_def.name}', {{}}))")
378
+ else:
379
+ lines.append(f"{indent}df_{var_name} = read_file(config.get('sources', {{}}).get('{src_def.name}', {{}}).get('file_path', '{src_def.name}'),")
380
+ lines.append(f"{indent} config.get('sources', {{}}).get('{src_def.name}', {{}}))")
374
381
 
375
382
 
376
- def _emit_flatfile_write(lines, var_name, tgt_def, indent=" "):
383
+ def _emit_flatfile_write(lines, var_name, tgt_def, indent=" ", file_path_override=None):
377
384
  ff = tgt_def.flatfile
378
385
  fc = _flatfile_config_dict(ff)
386
+ default_path = f"_tgt_path_{var_name}" if file_path_override else f"config.get('targets', {{}}).get('{tgt_def.name}', {{}}).get('file_path', '{tgt_def.name}')"
379
387
  file_cfg = {}
380
388
  if "delimiter" in fc:
381
389
  file_cfg["delimiter"] = fc["delimiter"]
@@ -387,10 +395,16 @@ def _emit_flatfile_write(lines, var_name, tgt_def, indent=" "):
387
395
  if file_cfg:
388
396
  lines.append(f"{indent}ff_cfg_{var_name} = {repr(file_cfg)}")
389
397
  lines.append(f"{indent}ff_cfg_{var_name}.update(config.get('targets', {{}}).get('{tgt_def.name}', {{}}))")
390
- lines.append(f"{indent}write_file(df_target_{var_name}, ff_cfg_{var_name}.get('file_path', '{tgt_def.name}'), ff_cfg_{var_name})")
398
+ if file_path_override:
399
+ lines.append(f"{indent}write_file(df_target_{var_name}, {default_path}, ff_cfg_{var_name})")
400
+ else:
401
+ lines.append(f"{indent}write_file(df_target_{var_name}, ff_cfg_{var_name}.get('file_path', '{tgt_def.name}'), ff_cfg_{var_name})")
391
402
  else:
392
- lines.append(f"{indent}write_file(df_target_{var_name}, config.get('targets', {{}}).get('{tgt_def.name}', {{}}).get('file_path', '{tgt_def.name}'),")
393
- lines.append(f"{indent} config.get('targets', {{}}).get('{tgt_def.name}', {{}}))")
403
+ if file_path_override:
404
+ lines.append(f"{indent}write_file(df_target_{var_name}, {default_path}, config.get('targets', {{}}).get('{tgt_def.name}', {{}}))")
405
+ else:
406
+ lines.append(f"{indent}write_file(df_target_{var_name}, config.get('targets', {{}}).get('{tgt_def.name}', {{}}).get('file_path', '{tgt_def.name}'),")
407
+ lines.append(f"{indent} config.get('targets', {{}}).get('{tgt_def.name}', {{}}))")
394
408
 
395
409
 
396
410
  def _build_source_map(mapping, folder):
@@ -1202,7 +1216,7 @@ def _generate_target_write(lines, tgt_name, tgt_def, connector_graph, source_dfs
1202
1216
  lines.append(f" _tgt_path_{tgt_safe} = config.get('targets', {{}}).get('{tgt_def.name}', {{}}).get('file_path',")
1203
1217
  lines.append(f" os.path.join('{out_dir}', '{out_file}'))")
1204
1218
  if tgt_def.flatfile:
1205
- _emit_flatfile_write(lines, tgt_safe, tgt_def)
1219
+ _emit_flatfile_write(lines, tgt_safe, tgt_def, file_path_override=True)
1206
1220
  else:
1207
1221
  lines.append(f" write_file(df_target_{tgt_safe}, _tgt_path_{tgt_safe}, config.get('targets', {{}}).get('{tgt_def.name}', {{}}))")
1208
1222
  elif tgt_def.database_type and tgt_def.database_type != "Flat File":
@@ -195,10 +195,11 @@ def _emit_task_code(lines, task, mapping_name_map, session_to_mapping, wf, workl
195
195
  lines.append(f" logger.info('Executing worklet: {task.name}')")
196
196
  if matched_worklet:
197
197
  lines.append(f" worklet_result_{task_safe} = run_worklet_{worklet_safe}(config)")
198
+ lines.append(f" if not worklet_result_{task_safe}:")
199
+ lines.append(f" raise RuntimeError('Worklet {worklet_name} returned failure')")
198
200
  else:
199
- lines.append(f" worklet_result_{task_safe} = run_worklet_{worklet_safe}(config)")
200
- lines.append(f" if not worklet_result_{task_safe}:")
201
- lines.append(f" raise RuntimeError('Worklet {worklet_name} returned failure')")
201
+ lines.append(f" # WARNING: Worklet '{worklet_name}' definition not found in folder")
202
+ lines.append(f" logger.warning('Worklet {worklet_name} not found — skipping')")
202
203
  lines.append(f" except Exception as e:")
203
204
  lines.append(f" logger.error(f'Worklet {task.name} failed: {{e}}')")
204
205
  if task.fail_parent_if_instance_fails == "YES":
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: informatica-python
3
- Version: 1.4.0
3
+ Version: 1.4.1
4
4
  Summary: Convert Informatica PowerCenter workflow XML to Python/PySpark code
5
5
  License-Expression: MIT
6
6
  Requires-Python: >=3.8
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "informatica-python"
7
- version = "1.4.0"
7
+ version = "1.4.1"
8
8
  description = "Convert Informatica PowerCenter workflow XML to Python/PySpark code"
9
9
  readme = "README.md"
10
10
  license = "MIT"