informatica-python 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- informatica_python/__init__.py +4 -0
- informatica_python/cli.py +83 -0
- informatica_python/converter.py +285 -0
- informatica_python/generators/__init__.py +0 -0
- informatica_python/generators/config_gen.py +159 -0
- informatica_python/generators/error_log_gen.py +140 -0
- informatica_python/generators/helper_gen.py +693 -0
- informatica_python/generators/mapping_gen.py +649 -0
- informatica_python/generators/sql_gen.py +132 -0
- informatica_python/generators/workflow_gen.py +234 -0
- informatica_python/models.py +281 -0
- informatica_python/parser.py +468 -0
- informatica_python/utils/__init__.py +0 -0
- informatica_python/utils/datatype_map.py +105 -0
- informatica_python/utils/expression_converter.py +128 -0
- informatica_python-1.0.0.dist-info/METADATA +118 -0
- informatica_python-1.0.0.dist-info/RECORD +20 -0
- informatica_python-1.0.0.dist-info/WHEEL +5 -0
- informatica_python-1.0.0.dist-info/entry_points.txt +2 -0
- informatica_python-1.0.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,132 @@
|
|
|
1
|
+
from informatica_python.models import FolderDef
|
|
2
|
+
from informatica_python.utils.expression_converter import convert_sql_expression, detect_sql_dialect
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
def generate_sql_file(folder: FolderDef) -> str:
    """Render one .sql file collecting every SQL snippet found in *folder*.

    Scans each mapping's transformations for the known SQL-bearing
    attributes (query/lookup overrides, pre/post SQL, user-defined joins,
    source filters) and each session's transformation instances for any
    attribute whose name contains "sql".  Each snippet is run through
    convert_sql_expression() and emitted under a commented header.

    Returns the whole file content as a single newline-joined string.
    """
    # Raw transformation attribute name -> display label written into the
    # generated comment header (labels intentionally differ from the raw
    # names for the override/pre/post attributes).
    sql_attr_labels = {
        "Sql Query": "Sql Query",
        "Lookup Sql Override": "Lookup SQL Override",
        "Pre SQL": "Pre-SQL",
        "Post SQL": "Post-SQL",
        "User Defined Join": "User Defined Join",
        "Source Filter": "Source Filter",
    }

    lines = []
    lines.append("-- ============================================================")
    lines.append(f"-- All SQL Queries extracted from folder: {folder.name}")
    lines.append("-- Auto-generated by informatica-python")
    lines.append("-- ============================================================")
    lines.append("")

    sql_count = 0

    for mapping in folder.mappings:
        mapping_sqls = []

        for tx in mapping.transformations:
            for attr in tx.attributes:
                label = sql_attr_labels.get(attr.name)
                # Skip attributes that are not SQL-bearing or hold only
                # empty/whitespace values.
                if label is None or not (attr.value and attr.value.strip()):
                    continue
                sql = convert_sql_expression(attr.value)
                mapping_sqls.append({
                    "transformation": tx.name,
                    "type": tx.type,
                    "attribute": label,
                    "sql": sql,
                    "dialect": detect_sql_dialect(sql),
                })
                sql_count += 1

        if mapping_sqls:
            lines.append("-- ============================================================")
            lines.append(f"-- Mapping: {mapping.name}")
            lines.append("-- ============================================================")
            lines.append("")

            for sq in mapping_sqls:
                lines.append(f"-- Transformation: {sq['transformation']} ({sq['type']})")
                lines.append(f"-- Attribute: {sq['attribute']}")
                lines.append(f"-- Detected dialect: {sq['dialect']}")
                lines.append("-- ----")
                lines.append(sq["sql"].rstrip())
                lines.append("")
                lines.append("")

    for session in folder.sessions:
        session_sqls = []
        for sti in session.transform_instances:
            for attr in sti.attributes:
                # Session-level attributes are matched loosely: anything
                # with "sql" in the name counts.
                if "sql" in attr.name.lower() and attr.value and attr.value.strip():
                    session_sqls.append({
                        "instance": sti.instance_name,
                        "attribute": attr.name,
                        "sql": convert_sql_expression(attr.value),
                    })
                    sql_count += 1

        if session_sqls:
            lines.append("-- ============================================================")
            lines.append(f"-- Session: {session.name}")
            lines.append("-- ============================================================")
            lines.append("")
            for sq in session_sqls:
                lines.append(f"-- Instance: {sq['instance']}")
                lines.append(f"-- Attribute: {sq['attribute']}")
                lines.append("-- ----")
                lines.append(sq["sql"].rstrip())
                lines.append("")
                lines.append("")

    lines.append(f"-- Total SQL queries extracted: {sql_count}")
    lines.append("")

    return "\n".join(lines)
|
|
@@ -0,0 +1,234 @@
|
|
|
1
|
+
from informatica_python.models import FolderDef, WorkflowDef, TaskInstanceDef
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
def generate_workflow_code(folder: FolderDef) -> str:
    """Build the Python source of the workflow driver module for *folder*.

    Emits a module docstring, fixed imports, one import per generated
    mapping module, then either one function per parsed workflow or a
    default workflow that runs every mapping, followed by a __main__ guard.
    """
    out = [
        '"""',
        f"Workflow orchestration for folder: {folder.name}",
        "Auto-generated by informatica-python",
        '"""',
        "",
        "import sys",
        "import logging",
        "from datetime import datetime",
        "from helper_functions import load_config, logger",
        "",
    ]

    # One import line per generated mapping module (mapping_1.py, ...).
    for idx, mp in enumerate(folder.mappings, 1):
        out.append(f"from mapping_{idx} import run_{_safe_name(mp.name)}")
    out.extend(["", ""])

    if folder.workflows:
        for wf in folder.workflows:
            _generate_workflow_function(out, wf, folder)
    else:
        _generate_default_workflow(out, folder)

    out.extend([
        "",
        "if __name__ == '__main__':",
        "    config = load_config()",
        "    success = run_workflow(config)",
        "    sys.exit(0 if success else 1)",
        "",
    ])

    return "\n".join(out)
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def _generate_workflow_function(lines, wf: WorkflowDef, folder: FolderDef):
    """Append a generated run_workflow() function for *wf* to *lines*.

    Tasks are emitted in topological order (_get_task_execution_order).
    Session tasks dispatch to the generated run_<mapping>() functions;
    Command tasks run via subprocess; the remaining task types get logging
    stubs with TODO markers.

    NOTE(review): every workflow emits a function named run_workflow, so
    with several workflows in one folder later definitions shadow earlier
    ones in the generated module — confirm whether per-workflow function
    names are wanted.
    """
    lines.append(f"def run_workflow(config, workflow_name='{wf.name}'):")
    lines.append('    """')
    lines.append(f"    Execute workflow: {wf.name}")
    if wf.description:
        lines.append(f"    Description: {wf.description}")
    lines.append('    """')
    lines.append(f"    logger.info(f'=== Starting Workflow: {wf.name} ===')")
    lines.append("    wf_start = datetime.now()")
    lines.append("    success = True")
    lines.append("    failed_tasks = []")
    lines.append("")

    if wf.variables:
        lines.append("    # Workflow Variables")
        for var in wf.variables:
            var_name = _safe_name(var.name.replace("$$", ""))
            default = var.default_value or "''"
            lines.append(f"    {var_name} = {default}")
        lines.append("")

    execution_order = _get_task_execution_order(wf)

    # Mapping name -> name of its generated runner function.
    mapping_name_map = {}
    for mapping in folder.mappings:
        mapping_name_map[mapping.name] = f"run_{_safe_name(mapping.name)}"

    # Session name -> mapping it executes, used to resolve Session tasks.
    session_to_mapping = {}
    for session in folder.sessions:
        if session.mapping_name:
            session_to_mapping[session.name] = session.mapping_name

    for task in execution_order:
        if task.task_type == "Start Task":
            lines.append(f"    # Start Task: {task.name}")
            lines.append("    logger.info('Workflow started')")
            lines.append("")
            continue

        if task.task_type == "Session":
            # Resolve via the referenced reusable task name, falling back
            # to the instance name.
            mapping_name = session_to_mapping.get(task.task_name or task.name, "")
            run_func = mapping_name_map.get(mapping_name)

            lines.append(f"    # Session: {task.name}")
            lines.append("    try:")
            lines.append(f"        logger.info('Executing session: {task.name}')")
            if run_func:
                lines.append(f"        {run_func}(config)")
            else:
                lines.append(f"        # TODO: Map session '{task.name}' to corresponding mapping function")
                lines.append(f"        logger.warning('Session {task.name} has no mapped function')")
            lines.append("    except Exception as e:")
            lines.append(f"        logger.error(f'Session {task.name} failed: {{e}}')")

            if task.fail_parent_if_instance_fails == "YES":
                lines.append("        success = False")
                lines.append(f"        failed_tasks.append('{task.name}')")
            else:
                lines.append("        logger.warning('Continuing despite failure (fail_parent=NO)')")
            lines.append("")

        elif task.task_type == "Command":
            # Last matching attribute wins, mirroring the parse order.
            cmd = ""
            for attr in task.attributes:
                if attr.name in ("Command", "CmdLine"):
                    cmd = attr.value
            lines.append(f"    # Command Task: {task.name}")
            lines.append("    try:")
            lines.append("        import subprocess, shlex")
            if cmd:
                # repr() quotes/escapes the command for the generated code.
                lines.append(f"        subprocess.run(shlex.split({repr(cmd)}), check=True)")
            else:
                lines.append(f"        # TODO: Configure command for task '{task.name}'")
                lines.append("        pass")
            lines.append("    except Exception as e:")
            lines.append(f"        logger.error(f'Command task {task.name} failed: {{e}}')")
            if task.fail_parent_if_instance_fails == "YES":
                lines.append("        success = False")
                lines.append(f"        failed_tasks.append('{task.name}')")
            lines.append("")

        elif task.task_type == "Email Task":
            lines.append(f"    # Email Task: {task.name}")
            lines.append("    # TODO: Implement email notification logic")
            lines.append(f"    logger.info('Email task: {task.name} - skipped (implement email logic)')")
            lines.append("")

        elif task.task_type == "Decision":
            lines.append(f"    # Decision Task: {task.name}")
            decision_cond = ""
            for attr in task.attributes:
                if attr.name == "Decision Condition":
                    decision_cond = attr.value
            if decision_cond:
                lines.append(f"    # Condition: {decision_cond}")
            lines.append(f"    logger.info('Decision task: {task.name}')")
            lines.append("")

        elif task.task_type == "Timer":
            lines.append(f"    # Timer Task: {task.name}")
            lines.append("    import time")
            lines.append("    # TODO: Configure timer delay")
            lines.append(f"    logger.info('Timer task: {task.name}')")
            lines.append("")

        elif task.task_type == "Assignment":
            lines.append(f"    # Assignment Task: {task.name}")
            for attr in task.attributes:
                if attr.name and attr.value:
                    lines.append(f"    # {attr.name} = {attr.value}")
            lines.append(f"    logger.info('Assignment task: {task.name}')")
            lines.append("")

        else:
            lines.append(f"    # Task: {task.name} (Type: {task.task_type})")
            lines.append(f"    logger.info('Executing task: {task.name} ({task.task_type})')")
            lines.append("    # TODO: Implement task logic")
            lines.append("")

    lines.append("    elapsed = (datetime.now() - wf_start).total_seconds()")
    lines.append("    if success:")
    lines.append(f"        logger.info(f'=== Workflow {wf.name} completed successfully in {{elapsed:.2f}}s ===')")
    lines.append("    else:")
    lines.append(f"        logger.error(f'=== Workflow {wf.name} failed in {{elapsed:.2f}}s. Failed tasks: {{failed_tasks}} ===')")
    lines.append("    return success")
    lines.append("")
|
|
168
|
+
|
|
169
|
+
|
|
170
|
+
def _generate_default_workflow(lines, folder):
    """Append a fallback run_workflow() that runs every mapping in order.

    Used when the folder contains no parsed workflow definitions; each
    mapping runs inside its own try/except so a failure does not stop the
    remaining mappings.
    """
    lines.extend([
        "def run_workflow(config, workflow_name='default'):",
        '    """Execute all mappings in order."""',
        "    logger.info('=== Starting Default Workflow ===')",
        "    wf_start = datetime.now()",
        "    success = True",
        "",
    ])

    for seq, mp in enumerate(folder.mappings, 1):
        runner = _safe_name(mp.name)
        lines.extend([
            "    try:",
            f"        logger.info('Executing mapping {seq}: {mp.name}')",
            f"        run_{runner}(config)",
            "    except Exception as e:",
            f"        logger.error(f'Mapping {mp.name} failed: {{e}}')",
            "        success = False",
            "",
        ])

    lines.extend([
        "    elapsed = (datetime.now() - wf_start).total_seconds()",
        "    logger.info(f'=== Workflow completed in {elapsed:.2f}s (success={success}) ===')",
        "    return success",
        "",
    ])
|
|
192
|
+
|
|
193
|
+
|
|
194
|
+
def _get_task_execution_order(wf: WorkflowDef):
    """Return wf.task_instances topologically sorted by the workflow links.

    Kahn's algorithm with an alphabetical tie-break (via a min-heap) so
    the output is deterministic.  Links whose endpoints are not known task
    instances are ignored.  Tasks that never become ready — members of a
    cycle — are appended afterwards in declaration order so every task
    appears exactly once.  Assumes instance names are unique within the
    workflow (task_map would collapse duplicates anyway).
    """
    import heapq

    if not wf.task_instances:
        return []

    task_map = {t.name: t for t in wf.task_instances}
    adj = {t.name: [] for t in wf.task_instances}
    in_degree = {t.name: 0 for t in wf.task_instances}

    for link in wf.links:
        # Skip links referencing instances outside this workflow.
        if link.from_instance in adj and link.to_instance in adj:
            adj[link.from_instance].append(link.to_instance)
            in_degree[link.to_instance] += 1

    # Min-heap of ready tasks gives O(log n) extraction of the
    # alphabetically-smallest ready name (the original sorted the whole
    # queue and popped index 0 on every iteration).
    ready = [name for name, deg in in_degree.items() if deg == 0]
    heapq.heapify(ready)

    ordered = []
    ordered_names = set()
    while ready:
        node = heapq.heappop(ready)
        ordered.append(task_map[node])
        ordered_names.add(node)
        for neighbor in adj[node]:
            in_degree[neighbor] -= 1
            if in_degree[neighbor] == 0:
                heapq.heappush(ready, neighbor)

    # Anything left (cycle members) still has to appear; keep declaration
    # order.  Name-set membership replaces the original O(n^2) list scan.
    for t in wf.task_instances:
        if t.name not in ordered_names:
            ordered.append(t)
            ordered_names.add(t.name)

    return ordered
|
|
227
|
+
|
|
228
|
+
|
|
229
|
+
def _safe_name(name):
    """Turn *name* into a lowercase Python identifier.

    Every character outside [a-zA-Z0-9_] becomes an underscore, and a
    leading digit gets an underscore prefix.  An empty string stays empty.
    """
    import re

    cleaned = re.sub(r'[^a-zA-Z0-9_]', '_', name)
    # str[:1] is '' for the empty string, so no explicit emptiness check
    # is needed before testing the first character.
    if cleaned[:1].isdigit():
        cleaned = '_' + cleaned
    return cleaned.lower()
|
|
@@ -0,0 +1,281 @@
|
|
|
1
|
+
from dataclasses import dataclass, field
|
|
2
|
+
from typing import List, Dict, Optional
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
@dataclass
class FieldDef:
    """One field/port of a source, target, or transformation.

    Flag-like values (nullable, keytype, hidden) are stored as the literal
    strings from the parsed definition rather than booleans.
    """

    name: str
    datatype: str
    precision: int = 0
    scale: int = 0
    nullable: str = "NULL"
    keytype: str = "NOT A KEY"
    default_value: str = ""
    expression: str = ""
    expression_type: str = ""
    porttype: str = ""
    description: str = ""
    field_number: int = 0
    hidden: str = "NO"
    business_name: str = ""
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
@dataclass
class TableAttribute:
    """A generic name/value attribute pair attached to repository objects."""

    name: str
    value: str
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
@dataclass
class SourceDef:
    """A source table definition: database info plus fields and attributes."""

    name: str
    database_type: str = ""
    db_name: str = ""
    owner_name: str = ""
    description: str = ""
    business_name: str = ""
    fields: List[FieldDef] = field(default_factory=list)
    attributes: List[TableAttribute] = field(default_factory=list)
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
@dataclass
class TargetDef:
    """A target table definition, including constraint and table options."""

    name: str
    database_type: str = ""
    description: str = ""
    business_name: str = ""
    constraint: str = ""
    table_options: str = ""
    fields: List[FieldDef] = field(default_factory=list)
    attributes: List[TableAttribute] = field(default_factory=list)
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
@dataclass
class TransformationDef:
    """A transformation (name + type) with its fields, attributes and metadata."""

    name: str
    type: str
    description: str = ""
    reusable: str = "NO"  # "YES"/"NO" flag string
    fields: List[FieldDef] = field(default_factory=list)
    attributes: List[TableAttribute] = field(default_factory=list)
    metadata: Dict[str, str] = field(default_factory=dict)
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
@dataclass
class ConnectorDef:
    """A field-level link from one instance's port to another's within a mapping."""

    from_field: str
    from_instance: str
    from_instance_type: str
    to_field: str
    to_instance: str
    to_instance_type: str
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
@dataclass
class InstanceDef:
    """An instance of a transformation (or source/target) placed in a mapping."""

    name: str
    type: str
    transformation_name: str = ""
    transformation_type: str = ""
    description: str = ""
    reusable: str = "NO"  # "YES"/"NO" flag string
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
@dataclass
class TargetLoadOrder:
    """Load-order entry for one target instance within a mapping."""

    order: int = 1
    target_instance: str = ""
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
@dataclass
class MappingVariable:
    """A mapping-level variable/parameter declaration."""

    name: str
    datatype: str = "string"
    default_value: str = ""
    description: str = ""
    is_expression_variable: str = "NO"  # "YES"/"NO" flag string
    is_persistent: str = "NO"  # "YES"/"NO" flag string
    precision: int = 0
    scale: int = 0
    usage_type: str = ""
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
@dataclass
class MappingDef:
    """A mapping: sources, targets, transformations, connectors, instances,
    target load order and variables, plus free-form metadata."""

    name: str
    description: str = ""
    is_valid: str = "YES"  # "YES"/"NO" flag string
    sources: List[SourceDef] = field(default_factory=list)
    targets: List[TargetDef] = field(default_factory=list)
    transformations: List[TransformationDef] = field(default_factory=list)
    connectors: List[ConnectorDef] = field(default_factory=list)
    instances: List[InstanceDef] = field(default_factory=list)
    target_load_orders: List[TargetLoadOrder] = field(default_factory=list)
    variables: List[MappingVariable] = field(default_factory=list)
    metadata: Dict[str, str] = field(default_factory=dict)
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
@dataclass
class ConnectionRef:
    """Reference to a named connection used by a session or instance."""

    connection_name: str = ""
    connection_type: str = ""
    connection_subtype: str = ""
    component_version: str = ""
    variable: str = ""
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
@dataclass
class SessionTransformInst:
    """Per-session configuration of one transformation instance
    (pipeline/stage placement, attributes, connection references)."""

    instance_name: str = ""
    pipeline: str = ""
    stage: str = ""
    transformation_name: str = ""
    transformation_type: str = ""
    is_partitionable: str = "NO"  # "YES"/"NO" flag string
    attributes: List[TableAttribute] = field(default_factory=list)
    connections: List[ConnectionRef] = field(default_factory=list)
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
@dataclass
class SessionDef:
    """A session: the runtime configuration that executes one mapping
    (see mapping_name), with per-instance settings and connections."""

    name: str
    mapping_name: str = ""
    description: str = ""
    is_valid: str = "YES"  # "YES"/"NO" flag string
    reusable: str = "NO"  # "YES"/"NO" flag string
    transform_instances: List[SessionTransformInst] = field(default_factory=list)
    attributes: List[TableAttribute] = field(default_factory=list)
    config_references: List[Dict[str, str]] = field(default_factory=list)
    connections: List[ConnectionRef] = field(default_factory=list)
    components: List[Dict] = field(default_factory=list)
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
@dataclass
class TaskInstanceDef:
    """A task placed inside a workflow, with failure-propagation flags.

    task_name refers to the underlying (possibly reusable) task; name is
    the instance name within the workflow.
    """

    name: str
    task_name: str = ""
    task_type: str = ""
    description: str = ""
    is_valid: str = "YES"  # "YES"/"NO" flag string
    reusable: str = "NO"  # "YES"/"NO" flag string
    fail_parent_if_instance_fails: str = "YES"
    fail_parent_if_instance_did_not_run: str = "NO"
    treat_input_link_as_and: str = "YES"
    attributes: List[TableAttribute] = field(default_factory=list)
|
|
165
|
+
|
|
166
|
+
|
|
167
|
+
@dataclass
class WorkflowLink:
    """A directed link between two task instances, optionally conditional."""

    from_instance: str
    to_instance: str
    condition: str = ""
    link_type: str = ""
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
@dataclass
class WorkflowVariable:
    """A workflow-level variable declaration."""

    name: str
    datatype: str = "string"
    default_value: str = ""
    description: str = ""
    is_null: str = "NO"  # "YES"/"NO" flag string
    is_persistent: str = "NO"  # "YES"/"NO" flag string
    is_user_defined: str = "YES"  # "YES"/"NO" flag string
    precision: int = 0
    scale: int = 0
    usage_type: str = ""
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
@dataclass
class SchedulerDef:
    """A scheduler definition; its settings live in the attribute list."""

    name: str
    description: str = ""
    reusable: str = "NO"  # "YES"/"NO" flag string
    attributes: List[TableAttribute] = field(default_factory=list)
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
@dataclass
class WorkflowDef:
    """A workflow: task instances, the links ordering them, variables,
    attributes, and an optional scheduler reference."""

    name: str
    description: str = ""
    is_valid: str = "YES"  # "YES"/"NO" flag string
    reusable: str = "NO"  # "YES"/"NO" flag string
    scheduler_name: str = ""
    task_instances: List[TaskInstanceDef] = field(default_factory=list)
    links: List[WorkflowLink] = field(default_factory=list)
    variables: List[WorkflowVariable] = field(default_factory=list)
    attributes: List[TableAttribute] = field(default_factory=list)
    metadata: Dict[str, str] = field(default_factory=dict)
|
|
209
|
+
|
|
210
|
+
|
|
211
|
+
@dataclass
class ConfigDef:
    """A session configuration object; its settings live in the attribute list."""

    name: str
    description: str = ""
    is_valid: str = "YES"  # "YES"/"NO" flag string
    attributes: List[TableAttribute] = field(default_factory=list)
|
|
217
|
+
|
|
218
|
+
|
|
219
|
+
@dataclass
class MappletDef:
    """A mapplet: a reusable group of transformations with connectors and instances."""

    name: str
    description: str = ""
    is_valid: str = "YES"  # "YES"/"NO" flag string
    transformations: List[TransformationDef] = field(default_factory=list)
    connectors: List[ConnectorDef] = field(default_factory=list)
    instances: List[InstanceDef] = field(default_factory=list)
|
|
227
|
+
|
|
228
|
+
|
|
229
|
+
@dataclass
class ShortcutDef:
    """A shortcut pointing at an object in another folder/repository."""

    name: str
    shortcut_type: str = ""
    reference_name: str = ""
    folder_name: str = ""
    repository_name: str = ""
|
|
236
|
+
|
|
237
|
+
|
|
238
|
+
@dataclass
class TaskDef:
    """A (possibly reusable) task definition — contrast with TaskInstanceDef,
    which is a placement of a task inside a workflow."""

    name: str
    type: str = ""
    description: str = ""
    reusable: str = "NO"  # "YES"/"NO" flag string
    attributes: List[TableAttribute] = field(default_factory=list)
|
|
245
|
+
|
|
246
|
+
|
|
247
|
+
@dataclass
class FolderDef:
    """A folder: the container aggregating every parsed object kind
    (sources, targets, mappings, sessions, workflows, ...)."""

    name: str
    owner: str = ""
    description: str = ""
    group: str = ""
    shared: str = "NOTSHARED"
    permissions: str = ""
    sources: List[SourceDef] = field(default_factory=list)
    targets: List[TargetDef] = field(default_factory=list)
    mappings: List[MappingDef] = field(default_factory=list)
    mapplets: List[MappletDef] = field(default_factory=list)
    sessions: List[SessionDef] = field(default_factory=list)
    workflows: List[WorkflowDef] = field(default_factory=list)
    tasks: List[TaskDef] = field(default_factory=list)
    configs: List[ConfigDef] = field(default_factory=list)
    schedulers: List[SchedulerDef] = field(default_factory=list)
    shortcuts: List[ShortcutDef] = field(default_factory=list)
    transformations: List[TransformationDef] = field(default_factory=list)
|
|
266
|
+
|
|
267
|
+
|
|
268
|
+
@dataclass
class RepositoryDef:
    """A repository: version/codepage metadata plus its folders."""

    name: str
    version: str = ""
    codepage: str = ""
    database_type: str = ""
    folders: List[FolderDef] = field(default_factory=list)
|
|
275
|
+
|
|
276
|
+
|
|
277
|
+
@dataclass
class PowermartDef:
    """Root of a parsed export: creation info plus the contained repositories.

    Named after the POWERMART root element of Informatica exports —
    presumably the top-level tag of the parsed XML; confirm against parser.py.
    """

    creation_date: str = ""
    repository_version: str = ""
    repositories: List[RepositoryDef] = field(default_factory=list)
|