informatica-python 1.9.0.tar.gz → 1.9.1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. {informatica_python-1.9.0 → informatica_python-1.9.1}/PKG-INFO +1 -1
  2. {informatica_python-1.9.0 → informatica_python-1.9.1}/informatica_python/__init__.py +1 -1
  3. {informatica_python-1.9.0 → informatica_python-1.9.1}/informatica_python/generators/mapping_gen.py +34 -24
  4. {informatica_python-1.9.0 → informatica_python-1.9.1}/informatica_python.egg-info/PKG-INFO +1 -1
  5. {informatica_python-1.9.0 → informatica_python-1.9.1}/pyproject.toml +1 -1
  6. {informatica_python-1.9.0 → informatica_python-1.9.1}/tests/test_integration.py +75 -0
  7. {informatica_python-1.9.0 → informatica_python-1.9.1}/LICENSE +0 -0
  8. {informatica_python-1.9.0 → informatica_python-1.9.1}/README.md +0 -0
  9. {informatica_python-1.9.0 → informatica_python-1.9.1}/informatica_python/cli.py +0 -0
  10. {informatica_python-1.9.0 → informatica_python-1.9.1}/informatica_python/converter.py +0 -0
  11. {informatica_python-1.9.0 → informatica_python-1.9.1}/informatica_python/generators/__init__.py +0 -0
  12. {informatica_python-1.9.0 → informatica_python-1.9.1}/informatica_python/generators/config_gen.py +0 -0
  13. {informatica_python-1.9.0 → informatica_python-1.9.1}/informatica_python/generators/error_log_gen.py +0 -0
  14. {informatica_python-1.9.0 → informatica_python-1.9.1}/informatica_python/generators/helper_gen.py +0 -0
  15. {informatica_python-1.9.0 → informatica_python-1.9.1}/informatica_python/generators/sql_gen.py +0 -0
  16. {informatica_python-1.9.0 → informatica_python-1.9.1}/informatica_python/generators/workflow_gen.py +0 -0
  17. {informatica_python-1.9.0 → informatica_python-1.9.1}/informatica_python/models.py +0 -0
  18. {informatica_python-1.9.0 → informatica_python-1.9.1}/informatica_python/parser.py +0 -0
  19. {informatica_python-1.9.0 → informatica_python-1.9.1}/informatica_python/utils/__init__.py +0 -0
  20. {informatica_python-1.9.0 → informatica_python-1.9.1}/informatica_python/utils/datatype_map.py +0 -0
  21. {informatica_python-1.9.0 → informatica_python-1.9.1}/informatica_python/utils/expression_converter.py +0 -0
  22. {informatica_python-1.9.0 → informatica_python-1.9.1}/informatica_python/utils/lib_adapters.py +0 -0
  23. {informatica_python-1.9.0 → informatica_python-1.9.1}/informatica_python/utils/sql_dialect.py +0 -0
  24. {informatica_python-1.9.0 → informatica_python-1.9.1}/informatica_python.egg-info/SOURCES.txt +0 -0
  25. {informatica_python-1.9.0 → informatica_python-1.9.1}/informatica_python.egg-info/dependency_links.txt +0 -0
  26. {informatica_python-1.9.0 → informatica_python-1.9.1}/informatica_python.egg-info/entry_points.txt +0 -0
  27. {informatica_python-1.9.0 → informatica_python-1.9.1}/informatica_python.egg-info/requires.txt +0 -0
  28. {informatica_python-1.9.0 → informatica_python-1.9.1}/informatica_python.egg-info/top_level.txt +0 -0
  29. {informatica_python-1.9.0 → informatica_python-1.9.1}/setup.cfg +0 -0
  30. {informatica_python-1.9.0 → informatica_python-1.9.1}/tests/test_converter.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: informatica-python
3
- Version: 1.9.0
3
+ Version: 1.9.1
4
4
  Summary: Convert Informatica PowerCenter workflow XML to Python/PySpark code
5
5
  Author: Nick
6
6
  License: MIT
@@ -7,7 +7,7 @@ Licensed under the MIT License.
7
7
 
8
8
  from informatica_python.converter import InformaticaConverter
9
9
 
10
- __version__ = "1.9.0"
10
+ __version__ = "1.9.1"
11
11
  __author__ = "Nick"
12
12
  __license__ = "MIT"
13
13
  __all__ = ["InformaticaConverter"]
@@ -601,8 +601,19 @@ def _generate_source_qualifier(lines, sq, source_map, source_dfs, connector_grap
601
601
  lines.append(f" execute_sql(config, '''{pre_sql}''')")
602
602
  lines.append("")
603
603
 
604
- if sql_override:
605
- src_name = next(iter(connected_sources)) if connected_sources else "source"
604
+ if not connected_sources:
605
+ sq_src_name = sq.name[3:] if sq.name.upper().startswith("SQ_") else sq.name
606
+ if sql_override:
607
+ lines.append(f" sql_{sq_safe} = '''")
608
+ for sql_line in sql_override.strip().split("\n"):
609
+ lines.append(f" {sql_line}")
610
+ lines.append(f" '''")
611
+ lines.append(f" df_{sq_safe} = read_from_db(config, sql_{sq_safe}, 'default')")
612
+ else:
613
+ lines.append(f" df_{sq_safe} = read_file(config.get('sources', {{}}).get('{sq_src_name}', {{}}).get('file_path', '{sq_src_name}'),")
614
+ lines.append(f" config.get('sources', {{}}).get('{sq_src_name}', {{}}))")
615
+ elif sql_override:
616
+ src_name = next(iter(connected_sources))
606
617
  src_def = source_map.get(src_name, SourceDef(name=src_name))
607
618
  sq_override = (session_overrides or {}).get(sq.name, {}) or (session_overrides or {}).get(src_name, {})
608
619
  conn_name = sq_override.get("connection_name") or (_safe_name(src_def.db_name) if src_def.db_name else "default")
@@ -612,36 +623,35 @@ def _generate_source_qualifier(lines, sq, source_map, source_dfs, connector_grap
612
623
  lines.append(f" {sql_line}")
613
624
  lines.append(f" '''")
614
625
  lines.append(f" df_{sq_safe} = read_from_db(config, sql_{sq_safe}, '{conn_name}')")
626
+ elif len(connected_sources) == 1:
627
+ src_name = next(iter(connected_sources))
628
+ src_def = source_map.get(src_name, SourceDef(name=src_name))
629
+ safe_src = _safe_name(src_name)
630
+ src_override = (session_overrides or {}).get(sq.name, {}) or (session_overrides or {}).get(src_name, {})
631
+ if src_def.database_type and src_def.database_type != "Flat File":
632
+ conn_name = src_override.get("connection_name") or (_safe_name(src_def.db_name) if src_def.db_name else "default")
633
+ schema = src_def.owner_name or "dbo"
634
+ cols = ", ".join(f.name for f in src_def.fields) if src_def.fields else "*"
635
+ lines.append(f" df_{sq_safe} = read_from_db(config, 'SELECT {cols} FROM {schema}.{src_def.name}', '{conn_name}')")
636
+ elif src_def.flatfile:
637
+ _emit_flatfile_read(lines, sq_safe, src_def)
638
+ else:
639
+ lines.append(f" df_{sq_safe} = read_file(config.get('sources', {{}}).get('{src_def.name}', {{}}).get('file_path', '{src_def.name}'),")
640
+ lines.append(f" config.get('sources', {{}}).get('{src_def.name}', {{}}))")
615
641
  else:
616
- if len(connected_sources) == 1:
617
- src_name = next(iter(connected_sources))
642
+ for src_name in connected_sources:
618
643
  src_def = source_map.get(src_name, SourceDef(name=src_name))
619
644
  safe_src = _safe_name(src_name)
620
- src_override = (session_overrides or {}).get(sq.name, {}) or (session_overrides or {}).get(src_name, {})
621
645
  if src_def.database_type and src_def.database_type != "Flat File":
622
- conn_name = src_override.get("connection_name") or (_safe_name(src_def.db_name) if src_def.db_name else "default")
646
+ conn_name = _safe_name(src_def.db_name) if src_def.db_name else "default"
623
647
  schema = src_def.owner_name or "dbo"
624
- cols = ", ".join(f.name for f in src_def.fields) if src_def.fields else "*"
625
- lines.append(f" df_{sq_safe} = read_from_db(config, 'SELECT {cols} FROM {schema}.{src_def.name}', '{conn_name}')")
648
+ lines.append(f" df_{safe_src} = read_from_db(config, 'SELECT * FROM {schema}.{src_def.name}', '{conn_name}')")
626
649
  elif src_def.flatfile:
627
- _emit_flatfile_read(lines, sq_safe, src_def)
650
+ _emit_flatfile_read(lines, safe_src, src_def)
628
651
  else:
629
- lines.append(f" df_{sq_safe} = read_file(config.get('sources', {{}}).get('{src_def.name}', {{}}).get('file_path', '{src_def.name}'),")
652
+ lines.append(f" df_{safe_src} = read_file(config.get('sources', {{}}).get('{src_def.name}', {{}}).get('file_path', '{src_def.name}'),")
630
653
  lines.append(f" config.get('sources', {{}}).get('{src_def.name}', {{}}))")
631
- else:
632
- for src_name in connected_sources:
633
- src_def = source_map.get(src_name, SourceDef(name=src_name))
634
- safe_src = _safe_name(src_name)
635
- if src_def.database_type and src_def.database_type != "Flat File":
636
- conn_name = _safe_name(src_def.db_name) if src_def.db_name else "default"
637
- schema = src_def.owner_name or "dbo"
638
- lines.append(f" df_{safe_src} = read_from_db(config, 'SELECT * FROM {schema}.{src_def.name}', '{conn_name}')")
639
- elif src_def.flatfile:
640
- _emit_flatfile_read(lines, safe_src, src_def)
641
- else:
642
- lines.append(f" df_{safe_src} = read_file(config.get('sources', {{}}).get('{src_def.name}', {{}}).get('file_path', '{src_def.name}'),")
643
- lines.append(f" config.get('sources', {{}}).get('{src_def.name}', {{}}))")
644
- lines.append(f" df_{sq_safe} = df_{_safe_name(next(iter(connected_sources)))}")
654
+ lines.append(f" df_{sq_safe} = df_{_safe_name(next(iter(connected_sources)))}")
645
655
 
646
656
  source_dfs[sq.name] = f"df_{sq_safe}"
647
657
  lines.append(f" try:")
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: informatica-python
3
- Version: 1.9.0
3
+ Version: 1.9.1
4
4
  Summary: Convert Informatica PowerCenter workflow XML to Python/PySpark code
5
5
  Author: Nick
6
6
  License: MIT
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "informatica-python"
7
- version = "1.9.0"
7
+ version = "1.9.1"
8
8
  description = "Convert Informatica PowerCenter workflow XML to Python/PySpark code"
9
9
  readme = "README.md"
10
10
  license = {text = "MIT"}
@@ -3,6 +3,7 @@ import sys
3
3
  import csv
4
4
  import tempfile
5
5
  import shutil
6
+ import unittest
6
7
  import pytest
7
8
  from informatica_python.converter import InformaticaConverter
8
9
  from informatica_python.utils.expression_converter import (
@@ -1536,3 +1537,77 @@ class TestGeneratedCodeDocumentation:
1536
1537
  folder = FolderDef(name="TestFolder", mappings=[mapping])
1537
1538
  code = generate_mapping_code(mapping, folder, "pandas", 1)
1538
1539
  assert "..." in code
1540
+
1541
+
1542
+ class TestSourceQualifierNoSources(unittest.TestCase):
1543
+
1544
+ def test_sq_with_no_connected_sources_and_empty_source_map(self):
1545
+ from informatica_python.models import (
1546
+ FolderDef, MappingDef, TransformationDef,
1547
+ InstanceDef, ConnectorDef, FieldDef, TableAttribute,
1548
+ )
1549
+ from informatica_python.generators.mapping_gen import generate_mapping_code
1550
+ mapping = MappingDef(
1551
+ name="m_orphan_sq",
1552
+ transformations=[
1553
+ TransformationDef(name="SQ_ORPHAN", type="Source Qualifier", fields=[], attributes=[]),
1554
+ ],
1555
+ connectors=[],
1556
+ instances=[
1557
+ InstanceDef(name="SQ_ORPHAN", type="Source Qualifier", transformation_name="SQ_ORPHAN"),
1558
+ InstanceDef(name="TGT1", type="Target Definition", transformation_name="TGT1"),
1559
+ ],
1560
+ )
1561
+ folder = FolderDef(name="TestFolder", sources=[], targets=[], mappings=[mapping])
1562
+ code = generate_mapping_code(mapping, folder, "pandas", 1)
1563
+ assert "df_sq_orphan" in code
1564
+ assert "read_file" in code or "read_from_db" in code
1565
+
1566
+ def test_sq_with_no_connectors_but_sources_exist(self):
1567
+ from informatica_python.models import (
1568
+ FolderDef, MappingDef, TransformationDef,
1569
+ InstanceDef, ConnectorDef, FieldDef, TableAttribute,
1570
+ )
1571
+ from informatica_python.generators.mapping_gen import generate_mapping_code
1572
+ mapping = MappingDef(
1573
+ name="m_disconnect",
1574
+ transformations=[
1575
+ TransformationDef(name="SQ_SRC1", type="Source Qualifier", fields=[], attributes=[]),
1576
+ ],
1577
+ connectors=[],
1578
+ instances=[
1579
+ InstanceDef(name="SRC1", type="Source Definition", transformation_name="SRC1"),
1580
+ InstanceDef(name="SQ_SRC1", type="Source Qualifier", transformation_name="SQ_SRC1"),
1581
+ InstanceDef(name="TGT1", type="Target Definition", transformation_name="TGT1"),
1582
+ ],
1583
+ )
1584
+ folder = FolderDef(name="TestFolder", sources=[], targets=[], mappings=[mapping])
1585
+ code = generate_mapping_code(mapping, folder, "pandas", 1)
1586
+ assert "df_sq_src1" in code
1587
+
1588
+ def test_sq_with_sql_override_no_sources(self):
1589
+ from informatica_python.models import (
1590
+ FolderDef, MappingDef, TransformationDef,
1591
+ InstanceDef, ConnectorDef, FieldDef, TableAttribute,
1592
+ )
1593
+ from informatica_python.generators.mapping_gen import generate_mapping_code
1594
+ mapping = MappingDef(
1595
+ name="m_sql_only",
1596
+ transformations=[
1597
+ TransformationDef(
1598
+ name="SQ_SQL",
1599
+ type="Source Qualifier",
1600
+ fields=[],
1601
+ attributes=[TableAttribute(name="Sql Query", value="SELECT * FROM my_table")],
1602
+ ),
1603
+ ],
1604
+ connectors=[],
1605
+ instances=[
1606
+ InstanceDef(name="SQ_SQL", type="Source Qualifier", transformation_name="SQ_SQL"),
1607
+ InstanceDef(name="TGT1", type="Target Definition", transformation_name="TGT1"),
1608
+ ],
1609
+ )
1610
+ folder = FolderDef(name="TestFolder", sources=[], targets=[], mappings=[mapping])
1611
+ code = generate_mapping_code(mapping, folder, "pandas", 1)
1612
+ assert "df_sq_sql" in code
1613
+ assert "read_from_db" in code