tricc-oo 1.5.26__py3-none-any.whl → 1.6.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tests/build.py +1 -0
- tests/test_build.py +260 -0
- tricc_oo/converters/tricc_to_xls_form.py +15 -6
- tricc_oo/converters/xml_to_tricc.py +9 -8
- tricc_oo/models/base.py +4 -2
- tricc_oo/serializers/xls_form.py +34 -18
- tricc_oo/strategies/output/base_output_strategy.py +7 -0
- tricc_oo/strategies/output/dhis2_form.py +908 -0
- tricc_oo/strategies/output/openmrs_form.py +52 -5
- tricc_oo/strategies/output/xls_form.py +62 -32
- tricc_oo/strategies/output/xlsform_cht.py +107 -0
- tricc_oo/visitors/tricc.py +145 -71
- {tricc_oo-1.5.26.dist-info → tricc_oo-1.6.8.dist-info}/METADATA +2 -1
- {tricc_oo-1.5.26.dist-info → tricc_oo-1.6.8.dist-info}/RECORD +17 -15
- {tricc_oo-1.5.26.dist-info → tricc_oo-1.6.8.dist-info}/WHEEL +0 -0
- {tricc_oo-1.5.26.dist-info → tricc_oo-1.6.8.dist-info}/licenses/LICENSE +0 -0
- {tricc_oo-1.5.26.dist-info → tricc_oo-1.6.8.dist-info}/top_level.txt +0 -0
tricc_oo/strategies/output/dhis2_form.py (new file)
@@ -0,0 +1,908 @@
import logging
import os
import json
import uuid
import string
from tricc_oo.visitors.tricc import (
    is_ready_to_process,
    process_reference,
    generate_base,
    generate_calculate,
    walktrhough_tricc_node_processed_stached,
    check_stashed_loop,
)
from tricc_oo.converters.tricc_to_xls_form import get_export_name
import datetime
from tricc_oo.strategies.output.base_output_strategy import BaseOutPutStrategy
from tricc_oo.models.base import (
    not_clean, TriccOperation,
    TriccStatic, TriccReference
)
from tricc_oo.models.tricc import (
    TriccNodeSelectOption,
    TriccNodeInputModel,
    TriccNodeBaseModel,
    TriccNodeDisplayModel,
    TriccNodeCalculateBase,
    TriccNodeActivity,
    TriccNodeSelect,
    TriccNodeSelectYesNo,
)
from tricc_oo.models.calculate import TriccNodeDisplayCalculateBase
from tricc_oo.models.ordered_set import OrderedSet

logger = logging.getLogger("default")

# Namespace for deterministic UUIDs
UUID_NAMESPACE = uuid.UUID('87654321-4321-8765-cba9-fed098765432')


class DHIS2Strategy(BaseOutPutStrategy):
    processes = ["main"]
    project = None
    output_path = None

    def __init__(self, project, output_path):
        super().__init__(project, output_path)
        form_id = getattr(self.project.start_pages["main"], 'form_id', 'dhis2_program')
        self.program_metadata = {
            "id": self.generate_id(form_id),
            "name": form_id,
            "shortName": form_id[:50], # DHIS2 shortName limit
            "programType": "WITHOUT_REGISTRATION",
            "programStages": [],
            "programRules": []
        }
        self.option_sets = {}
        self.options = {}
        self.data_elements = {}
        self.program_rules = []
        self.program_rule_actions = []
        self.program_rule_variables = []
        self.field_counter = 1
        self.current_section = None
        self.concept_map = {}
        # Track programRuleActions per stage
        self.stage_rule_actions = {}
        self.sections = {}

    def get_export_name(self, r):
        if isinstance(r, TriccNodeSelectOption):
            ret = self.get_option_value(r.name)
        elif isinstance(r, str):
            ret = self.get_option_value(r)
        elif isinstance(r, TriccStatic):
            if isinstance(r.value, str):
                ret = self.get_option_value(r.value)
            elif isinstance(r.value, bool):
                ret = str(r.value).lower()
            else:
                ret = r.value
        else:
            ret = get_export_name(r)
        if isinstance(ret, str):
            return ret[:50]
        else:
            return ret

    def generate_id(self, name):
        """Generate DHIS2-compliant UID: 1 letter + 10 alphanumeric characters"""
        # Convert UUID to base62-like string and take first 11 chars, ensuring starts with letter
        # Create DHIS2 UID: start with letter, followed by 10 alphanum chars
        letters = string.ascii_letters
        alphanum = string.ascii_letters + string.digits

        # Use hash of the name to get deterministic but varied results
        import hashlib
        hash_obj = hashlib.md5(name.encode('utf-8')).digest()
        hash_int = int.from_bytes(hash_obj, byteorder='big')

        # First character: letter
        first_char = letters[hash_int % len(letters)]

        # Remaining 10 characters: alphanumeric
        remaining_chars = ''
        for i in range(10):
            remaining_chars += alphanum[(hash_int >> (i * 6)) % len(alphanum)]

        return first_char + remaining_chars
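The UID above is a pure function of `name`: the first character is drawn from the 52 ASCII letters and the next ten from 6-bit slices of the 128-bit MD5 digest, so the same name always maps to the same 11-character identifier across builds. A minimal standalone sketch of the same scheme (the helper name `_uid` is illustrative, not part of the package):

    import hashlib
    import string

    def _uid(name):
        # mirrors DHIS2Strategy.generate_id: deterministic 1 letter + 10 alphanumerics
        h = int.from_bytes(hashlib.md5(name.encode('utf-8')).digest(), byteorder='big')
        alphanum = string.ascii_letters + string.digits
        return string.ascii_letters[h % 52] + ''.join(
            alphanum[(h >> (i * 6)) % 62] for i in range(10)
        )

    assert _uid("weight") == _uid("weight")  # same name, same UID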

    def get_option_value(self, option_name):
        if option_name == 'true':
            return TriccStatic(True)
        elif option_name == 'false':
            return TriccStatic(False)
        return self.concept_map.get(option_name, option_name)

    def get_tricc_operation_expression(self, operation):
        ref_expressions = []
        if not hasattr(operation, "reference"):
            return self.get_tricc_operation_operand(operation)
        for r in operation.reference:
            if isinstance(r, list):
                r_expr = [
                    (
                        self.get_tricc_operation_expression(sr)
                        if isinstance(sr, TriccOperation)
                        else self.get_tricc_operation_operand(sr)
                    )
                    for sr in r
                ]
            elif isinstance(r, TriccOperation):
                r_expr = self.get_tricc_operation_expression(r)
            else:
                r_expr = self.get_tricc_operation_operand(r)
            if isinstance(r_expr, TriccReference):
                r_expr = self.get_tricc_operation_operand(r_expr)
            elif isinstance(r_expr, TriccStatic) and isinstance(r_expr.value, bool):
                r_expr = str(r_expr.value).lower()
            ref_expressions.append(r_expr)

        if hasattr(self, f"tricc_operation_{operation.operator}"):
            callable = getattr(self, f"tricc_operation_{operation.operator}")
            return callable(ref_expressions)
        else:
            raise NotImplementedError(
                f"This type of operation '{operation.operator}' is not supported in this strategy"
            )
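Operands are rendered first, then the operator name is dispatched to the matching `tricc_operation_*` helper defined further down; unsupported operators fail fast with `NotImplementedError`. An illustrative call, assuming `strategy` is a `DHIS2Strategy` instance and the operand strings are already rendered:

    handler = getattr(strategy, "tricc_operation_and")
    handler(["d2:hasValue(#{has_fever})", "#{age_in_months} < 60"])
    # -> 'd2:hasValue(#{has_fever}) && #{age_in_months} < 60'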

    def get_display(self, node):
        if hasattr(node, 'label') and node.label:
            ret = node.label
        elif hasattr(node, 'name') and node.name:
            ret = node.name
        else:
            ret = str(node.id)
        return ret.replace('\u00a0', ' ').strip()

    def execute(self):
        version = datetime.datetime.now().strftime("%Y%m%d%H%M")
        logger.info(f"build version: {version}")
        if "main" in self.project.start_pages:
            self.process_base(self.project.start_pages, pages=self.project.pages, version=version)
        else:
            logger.critical("Main process required")

        logger.info("generate the relevance based on edges")
        self.process_relevance(self.project.start_pages, pages=self.project.pages)

        logger.info("generate the calculate based on edges")
        self.process_calculate(self.project.start_pages, pages=self.project.pages)

        logger.info("generate the export format")
        self.process_export(self.project.start_pages, pages=self.project.pages)

        logger.info("print the export")
        self.export(self.project.start_pages, version=version)

    def map_tricc_type_to_dhis2_value_type(self, node):
        mapping = {
            'text': 'TEXT',
            'integer': 'INTEGER',
            'decimal': 'NUMBER',
            'date': 'DATE',
            'datetime': 'DATETIME',
            'select_one': 'TEXT', # DHIS2 handles options via optionSets
            'select_multiple': 'TEXT', # Multiple selections as comma-separated
            'select_yesno': 'BOOLEAN',
            'yesno': 'BOOLEAN',
            'boolean': 'BOOLEAN',
            'not_available': 'BOOLEAN',
            'note': 'LONG_TEXT'
        }
        return mapping.get(node.tricc_type, 'TEXT')

    def generate_base(self, node, processed_nodes, **kwargs):
        if generate_base(node, processed_nodes, **kwargs):
            if getattr(node, 'name', '') not in ('true', 'false'):
                self.concept_map[node.name] = self.generate_id(self.get_export_name(node))
            return True
        return False

    def generate_relevance(self, node, processed_nodes, **kwargs):
        if not is_ready_to_process(node, processed_nodes, strict=True):
            return False

        if node not in processed_nodes:
            relevance = None
            if hasattr(node, 'relevance') and node.relevance:
                relevance = node.relevance
            if hasattr(node, 'expression') and node.expression:
                relevance = node.expression
            if relevance:
                relevance_str = self.convert_expression_to_string(not_clean(relevance))
                if relevance_str and relevance_str != 'false':
                    # Create program rule action for hiding/showing based on relevance
                    rule_id = self.generate_id(f"rule_{node.get_name()}_relevance")
                    action_id = self.generate_id(f"action_{rule_id}")

                    if isinstance(node, TriccNodeActivity):
                        # For activities, use HIDESECTION action instead of HIDEFIELD
                        # Store activity reference for later section ID assignment
                        program_rule_action = {
                            "id": action_id,
                            "programRuleActionType": "HIDESECTION",
                            "activity_ref": node, # Temporary reference to be replaced with section ID
                            "programRule": {"id": rule_id},
                        }
                    else:
                        # For regular nodes, use HIDEFIELD action
                        program_rule_action = {
                            "id": action_id,
                            "programRuleActionType": "HIDEFIELD",
                            "dataElement": {
                                "id": self.generate_id(self.get_export_name(node))
                            },
                            "programRule": {"id": rule_id}
                        }
                    self.program_rule_actions.append(program_rule_action)

                    # Create program rule referencing the action
                    condition = self.simplify_expression(f"!({relevance_str})") # Negate for hide when true
                    condition = self.simplify_expression(condition)
                    self.program_rules.append({
                        "id": rule_id,
                        "name": f"Hide `{self.get_export_name(node)}` when condition met",
                        "description": f"Hide `{self.get_display(node)}` based on relevance",
                        "condition": condition,
                        "programRuleActions": [{"id": action_id}]
                    })
        return True
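The rule condition is the negated relevance: DHIS2 program rules are phrased as "hide when the condition is true", while tricc relevance is phrased as "show when true". A sketch with hypothetical names (`strategy` being a `DHIS2Strategy` instance):

    relevance_str = "#{has_fever} == true"           # already-rendered relevance
    condition = strategy.simplify_expression(f"!({relevance_str})")
    # -> '!(#{has_fever} == true)'  (the field is hidden whenever it is not relevant)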

    def generate_data_element(self, node):
        if issubclass(node.__class__, TriccNodeDisplayModel) and not isinstance(node, TriccNodeSelectOption):
            de_id = self.generate_id(self.get_export_name(node))

            # Check if this is a boolean question (yes/no with boolean options)
            is_boolean_question = False
            if hasattr(node, 'options') and node.options:
                option_names = [
                    str(self.get_export_name(opt)).lower()
                    for opt in node.options.values()
                    if isinstance(opt, TriccNodeSelectOption)]
                # If options are only true/false or yes/no variants, treat as boolean
                boolean_options = {'true', 'false', 'yes', 'no', '1', '0'}
                if all(opt in boolean_options for opt in option_names):
                    is_boolean_question = True

            # Override valueType for boolean questions
            value_type = self.map_tricc_type_to_dhis2_value_type(node)
            if is_boolean_question:
                value_type = "BOOLEAN"

            data_element = {
                "id": de_id,
                "name": self.get_export_name(node),
                "shortName": node.name[:50],
                "displayFormName": self.get_display(node),
                "formName": self.get_display(node),
                "valueType": value_type,
                "domainType": "TRACKER",
                "aggregationType": "NONE"
            }
            if issubclass(node.__class__, TriccNodeSelect) and not isinstance(node, TriccNodeSelectYesNo):
                data_element["optionSetValue"] = True

            # Only create optionSet for non-boolean select questions
            if node.tricc_type in ['select_one', 'select_multiple'] and not is_boolean_question:
                # Create optionSet for choices
                if hasattr(node, 'options') and node.options:
                    option_set_id = self.generate_id(f"optionset_{node.name}")
                    data_element["optionSet"] = {"id": option_set_id}

                    # Create the actual optionSet definition
                    option_set = {
                        "id": option_set_id,
                        "name": f"{self.get_export_name(node)} Options",
                        "shortName": f"{node.name}_opts"[:50],
                        "valueType": "TEXT",
                        "options": []
                    }

                    # Add options (node.options is a dict, not a list)
                    for key, option in node.options.items():
                        if isinstance(option, TriccNodeSelectOption):
                            option_id = self.generate_id(f"option_{node.name}_{option.name}")
                            option_name = self.get_export_name(option)
                            if isinstance(option_name, str):
                                option_name = option_name.replace('\u00a0', ' ').strip()
                            elif isinstance(option_name, TriccStatic):
                                option_name = str(option_name.value)
                            # Create separate option entity
                            option_def = {
                                "id": option_id,
                                "name": self.get_display(option),
                                "shortName": option.name[:50],
                                "code": str(self.get_export_name(option))
                            }
                            self.options[option_id] = option_def

                            # Add option reference to optionSet (only ID)
                            option_set["options"].append({"id": option_id})

                    self.option_sets[option_set_id] = option_set

            self.data_elements[node.name] = data_element

            # Create program rule variable for this data element
            var_id = self.generate_id(f"var_{node.name}")
            var_name = self.get_export_name(node)
            program_rule_variable = {
                "id": var_id,
                "name": var_name,
                "programRuleVariableSourceType": "DATAELEMENT_CURRENT_EVENT",
                "dataElement": {"id": de_id},
                "program": {"id": self.program_metadata["id"]}
            }
            self.program_rule_variables.append(program_rule_variable)
            self.concept_map[node.name] = var_name # Store variable name for #{var_name} references

            return data_element
        return None
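For a non-boolean select question, the method above therefore emits three linked pieces of metadata: the dataElement itself, one option and optionSet entry per choice, and a `DATAELEMENT_CURRENT_EVENT` program rule variable so the answer can later be referenced as `#{<export name>}` in rule conditions. Roughly, with illustrative IDs and names:

    {
        "id": "q3F9Qw2LzDk",
        "name": "p_symptom",
        "shortName": "p_symptom",
        "displayFormName": "Symptom",
        "formName": "Symptom",
        "valueType": "TEXT",
        "domainType": "TRACKER",
        "aggregationType": "NONE",
        "optionSetValue": True,
        "optionSet": {"id": "..."}
    }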

    def generate_calculate(self, node, processed_nodes, **kwargs):
        if generate_calculate(node, processed_nodes, **kwargs):
            if issubclass(node.__class__, TriccNodeCalculateBase) and node.expression:
                # Create program rule variable for the calculate
                var_id = self.generate_id(self.get_export_name(node))
                expression_str = self.convert_expression_to_string(node.expression)

                # Determine data type from operation
                data_type = "TEXT" # default
                if hasattr(node.expression, 'get_datatype'):
                    operation_datatype = node.expression.get_datatype()
                    if operation_datatype:
                        # Create a mock node with the datatype to use the mapping function
                        class MockNode:
                            def __init__(self, tricc_type):
                                self.tricc_type = tricc_type
                        mock_node = MockNode(operation_datatype)
                        data_type = self.map_tricc_type_to_dhis2_value_type(mock_node)

                var_name = self.get_export_name(node)
                program_rule_variable = {
                    "id": var_id,
                    "name": var_name,
                    "programRuleVariableSourceType": "CALCULATED_VALUE",
                    "calculatedValueScript": expression_str,
                    "dataType": data_type,
                    "useCodeForOptionSet": False,
                    "program": {"id": self.program_metadata["id"]}
                }
                self.program_rule_variables.append(program_rule_variable)
                # Add to concept map for potential referencing
                self.concept_map[node.name] = var_name # Store variable name
            return True
        return False

    def process_export(self, start_pages, **kwargs):
        self.activity_export(start_pages["main"], **kwargs)

    def activity_export(self, activity, processed_nodes=None, **kwargs):
        if processed_nodes is None:
            processed_nodes = OrderedSet()
        stashed_nodes = OrderedSet()
        groups = {}
        groups[activity.id] = 0
        path_len = 0
        process = ["main"]

        # Create program stage
        stage_id = self.generate_id(self.get_export_name(activity))
        program_stage = {
            "id": stage_id,
            "name": getattr(activity.root, 'label', 'Main Stage').replace('\u00a0', ' ').strip(),
            "programStageDataElements": [],
            "programStageSections": []
        }
        self.program_metadata["programStages"].append(program_stage)

        # Start with the main section for this activity
        self.start_section(activity, groups, processed_nodes, process, **kwargs)

        walktrhough_tricc_node_processed_stached(
            activity.root,
            self.generate_export,
            processed_nodes,
            stashed_nodes,
            path_len,
            cur_group=activity.root.group,
            process=process,
            recursive=False,
            **kwargs
        )

        # End the main section
        self.end_section(activity, groups, **kwargs)

        # Manage stashed nodes similar to other strategies
        prev_stashed_nodes = stashed_nodes.copy()
        loop_count = 0
        len_prev_processed_nodes = 0
        while len(stashed_nodes) > 0:
            loop_count = check_stashed_loop(
                stashed_nodes,
                prev_stashed_nodes,
                processed_nodes,
                len_prev_processed_nodes,
                loop_count,
            )
            prev_stashed_nodes = stashed_nodes.copy()
            len_prev_processed_nodes = len(processed_nodes)
            if len(stashed_nodes) > 0:
                s_node = stashed_nodes.pop()
                if s_node.group is None:
                    logger.critical("ERROR group is none for node {}".format(s_node.get_name()))

                # Start section for stashed node if it's a different group
                self.start_section(s_node.group, groups, processed_nodes, process, relevance=True, **kwargs)

                walktrhough_tricc_node_processed_stached(
                    s_node,
                    self.generate_export,
                    processed_nodes,
                    stashed_nodes,
                    path_len,
                    groups=groups,
                    cur_group=s_node.group,
                    recursive=False,
                    process=process,
                    **kwargs
                )

                # End section for stashed node
                self.end_section(s_node.group, groups, **kwargs)

        return processed_nodes

    def start_section(self, cur_group, groups, processed_nodes, process, relevance=False, **kwargs):
        name = get_export_name(cur_group)

        if name in groups:
            groups[name] += 1
            name = name + "_" + str(groups[name])
        else:
            groups[name] = 0

        relevance_expression = (
            cur_group.relevance if (
                relevance and
                cur_group.relevance is not None and
                cur_group.relevance != ""
            ) else ""
        )

        if not relevance:
            relevance_expression = ""
        elif isinstance(relevance_expression, (TriccOperation, TriccStatic)):
            relevance_expression = self.get_tricc_operation_expression(relevance_expression)

        # Create section
        section_id = self.generate_id(f"section_{name}")
        section_name = name
        if cur_group and hasattr(cur_group, 'label') and cur_group.label:
            section_name = cur_group.label.replace('\u00a0', ' ').strip()
        section = {
            "id": section_id,
            "name": section_name,
            "sortOrder": len(self.sections),
            "programStage": {"id": self.program_metadata["programStages"][-1]["id"]},
            "dataElements": [],
            "activity_ref": cur_group
        }
        # Add section to program stage
        if self.program_metadata["programStages"]:
            self.program_metadata["programStages"][-1]["programStageSections"].append({"id": section_id})

        self.sections[section_id] = section
        self.current_section = section_id

    def end_section(self, cur_group, groups, **kwargs):
        # In DHIS2, sections don't have explicit end markers like XLSForm groups
        # The section is already created and added to the program stage
        pass

    def generate_export(self, node, processed_nodes, **kwargs):
        if not is_ready_to_process(node, processed_nodes, strict=True):
            return False

        if not process_reference(
            node, processed_nodes, {}, replace_reference=False, codesystems=kwargs.get("codesystems", None)
        ):
            return False

        if node not in processed_nodes:
            # Skip creating data elements for calculate nodes - they should only be program rule variables
            if not issubclass(node.__class__, TriccNodeCalculateBase):
                data_element = self.generate_data_element(node)
                if data_element:
                    # Add to program stage
                    if self.program_metadata["programStages"]:
                        psde_id = self.generate_id(f"psde_{node.name}")
                        psde = {
                            "id": psde_id,
                            "dataElement": {"id": data_element["id"]},
                            "compulsory": bool(getattr(node, 'required', False))
                        }
                        self.program_metadata["programStages"][-1]["programStageDataElements"].append(psde)

                    # Add data element to current section
                    if self.current_section and self.current_section in self.sections:
                        self.sections[self.current_section]["dataElements"].append({"id": data_element["id"]})

        return True

    def clean_section(self, program_stages_payload):
        """Clean sections by removing empty ones and merging sections with same activity_ref"""
        sections_to_remove = set()
        prev_activity_ref = None
        prev_section_id = None

        for section in sorted(self.sections.values(), key=lambda x: x["sortOrder"]):
            section_id = section["id"]
            activity_ref = section.get("activity_ref")
            # Remove empty sections
            if not section.get("dataElements"):
                sections_to_remove.add(section_id)

            # Check for sections with same activity_ref
            elif activity_ref == prev_activity_ref:
                # Merge this section into the existing one
                existing_section = self.sections[prev_section_id]

                # Move data elements to existing section
                existing_section["dataElements"].extend(section["dataElements"])

                # Mark this section for removal
                sections_to_remove.add(section_id)
            else:
                prev_activity_ref = activity_ref
                prev_section_id = section_id

        # Remove sections that should be removed
        for section_id in sections_to_remove:
            if section_id in self.sections:
                del self.sections[section_id]

        # Update stage sections to remove deleted sections
        for stage in program_stages_payload:
            stage["programStageSections"][:] = [
                s for s in stage["programStageSections"]
                if s["id"] not in sections_to_remove
            ]

    def export(self, start_pages, version):
        form_id = start_pages["main"].root.form_id or "dhis2_program"
        base_path = os.path.join(self.output_path, form_id)
        if not os.path.exists(base_path):
            os.makedirs(base_path)

        # Prepare collections for all entities
        program_rules_payload = []
        program_rule_actions_payload = []
        program_stages_payload = []
        program_rule_variables_payload = []

        if self.program_metadata["programStages"]:
            # Extract full stage definitions
            program_stages_payload = [
                {
                    **stage,
                    "program": {"id": self.program_metadata["id"]}
                }
                for stage in self.program_metadata["programStages"]
            ]
            # Clean sections before processing actions to ensure only valid sections are used
            self.clean_section(program_stages_payload)
            # In program, only keep stage ID references
            self.program_metadata["programStages"] = [
                {"id": stage["id"]}
                for stage in program_stages_payload
            ]
        else:
            program_stages_payload = []

        if self.program_rule_actions:
            # Resolve activity references to section IDs for HIDESECTION actions
            program_rule_actions_payload = []
            for action in self.program_rule_actions:
                if action.get("activity_ref"):
                    # Find all sections for this activity (after cleaning)
                    activity = action["activity_ref"]
                    matching_sections = [
                        sec_id for sec_id, section in self.sections.items()
                        if section.get("activity_ref") == activity
                    ]

                    # Create one action per matching section
                    for i, section_id in enumerate(matching_sections):
                        action_copy = dict(action)
                        action_copy["programStageSection"] = {"id": section_id}
                        del action_copy["activity_ref"]

                        if i > 0:
                            # For additional sections, create new IDs for action and corresponding rule
                            original_rule_id = action["programRule"]["id"]
                            new_rule_id = self.generate_id(f"{original_rule_id}_section_{i}")
                            new_action_id = self.generate_id(f"{action['id']}_section_{i}")

                            action_copy["id"] = new_action_id
                            action_copy["programRule"] = {"id": new_rule_id}

                            # Create duplicate rule with new ID
                            original_rule = next(
                                (r for r in self.program_rules if r["id"] == original_rule_id), None
                            )
                            if original_rule:
                                new_rule = dict(original_rule)
                                new_rule["id"] = new_rule_id
                                new_rule["name"] = f"{original_rule['name']} (Section {i})"
                                new_rule["programRuleActions"] = [{"id": new_action_id}]
                                self.program_rules.append(new_rule)

                        program_rule_actions_payload.append(action_copy)
                else:
                    # Non-activity actions (HIDEFIELD) can be added directly
                    program_rule_actions_payload.append(action)

        if self.program_rules:
            program_rules_payload = [
                {
                    **rule,
                    "program": {"id": self.program_metadata["id"]}
                }
                for rule in self.program_rules
            ]

        if self.program_rule_variables:
            program_rule_variables_payload = self.program_rule_variables

        # Build the program with references to other entities
        program_payload = dict(self.program_metadata)
        if program_rule_variables_payload:
            program_payload["programRuleVariables"] = [
                {"id": var["id"]}
                for var in program_rule_variables_payload
            ]
        if program_rules_payload:
            program_payload["programRules"] = [
                {"id": rule["id"]}
                for rule in program_rules_payload
            ]

        # Create single comprehensive payload with all entities at root level
        full_payload = {
            "programs": [program_payload]
        }

        if program_stages_payload:
            full_payload["programStages"] = program_stages_payload
        if program_rules_payload:
            full_payload["programRules"] = program_rules_payload
        if program_rule_actions_payload:
            full_payload["programRuleActions"] = program_rule_actions_payload
        if program_rule_variables_payload:
            full_payload["programRuleVariables"] = program_rule_variables_payload
        if self.data_elements:
            full_payload["dataElements"] = list(self.data_elements.values())
        if self.options:
            full_payload["options"] = list(self.options.values())
        if self.option_sets:
            full_payload["optionSets"] = list(self.option_sets.values())
        if self.sections:
            # Remove activity_ref from sections before serialization
            sections_payload = []
            for section in self.sections.values():
                section_copy = dict(section)
                if "activity_ref" in section_copy:
                    del section_copy["activity_ref"]
                sections_payload.append(section_copy)
            full_payload["programStageSections"] = sections_payload

        # Export everything to a single file
        metadata_file = os.path.join(base_path, f"{form_id}_metadata.json")
        with open(metadata_file, 'w') as f:
            json.dump(full_payload, f, indent=2)
        logger.info(f"Exported complete DHIS2 metadata to {metadata_file}")
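The whole export is a single `<form_id>_metadata.json` file whose top-level keys mirror a DHIS2 metadata payload; `programs` is always present, the remaining collections only when non-empty:

    {
        "programs": [...],
        "programStages": [...],
        "programRules": [...],
        "programRuleActions": [...],
        "programRuleVariables": [...],
        "dataElements": [...],
        "options": [...],
        "optionSets": [...],
        "programStageSections": [...]
    }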

    def get_tricc_operation_operand(self, r):
        if isinstance(r, TriccOperation):
            return self.get_tricc_operation_expression(r)
        elif isinstance(r, TriccReference):
            # Use variable name from concept_map
            node_id = self.concept_map.get(r.value.name, self.get_export_name(r.value))
            return f"#{{{node_id}}}"
        elif isinstance(r, TriccStatic):
            if isinstance(r.value, bool):
                return str(r.value).lower()
            if isinstance(r.value, str):
                return f"'{r.value}'"
            else:
                return str(r.value)
        elif isinstance(r, bool):
            return str(r).lower()
        elif isinstance(r, str):
            return f"{r}"
        elif isinstance(r, (int, float)):
            return str(r)
        elif isinstance(r, TriccNodeSelectOption):
            option = self.get_option_value(r.name)
            if r.name in ('true', 'false'):
                return option
            return f"'{option}'"
        elif issubclass(r.__class__, TriccNodeDisplayCalculateBase):
            # Use variable name from concept_map
            node_id = self.get_export_name(r)
            return f"#{{{node_id}}}"
        elif issubclass(r.__class__, TriccNodeCalculateBase):
            # Use variable name from concept_map
            node_id = self.get_export_name(r)
            return f"#{{{node_id}}}"
        elif issubclass(r.__class__, TriccNodeInputModel):
            # Use variable name from concept_map
            node_id = self.get_export_name(r)
            return f"#{{{node_id}}}"
        elif issubclass(r.__class__, TriccNodeBaseModel):
            # Use variable name from concept_map
            node_id = self.get_export_name(r)
            return f"#{{{node_id}}}"
        else:
            raise NotImplementedError(f"This type of node {r.__class__.__name__} is not supported within an operation")

    def simplify_expression(self, expr):
        while expr.startswith('!(!(') and expr.endswith('))'):
            expr = expr[4:-2]
        return expr
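`simplify_expression` strips the double negations introduced by the relevance handling: as long as the string starts with `!(!(` and ends with `))`, the `[4:-2]` slice removes one wrapping pair. For example (illustrative variable name, `strategy` being a `DHIS2Strategy` instance):

    strategy.simplify_expression("!(!(#{cc_danger_signs} == true))")
    # -> '#{cc_danger_signs} == true'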

    def convert_expression_to_string(self, expression):
        if isinstance(expression, TriccOperation):
            expr = self.get_tricc_operation_expression(expression)
        else:
            expr = self.get_tricc_operation_operand(expression)

        # Simplify double negations
        expr = self.simplify_expression(expr)

        return expr

    # Operation methods for DHIS2 expressions
    def tricc_operation_equal(self, ref_expressions):
        return f"{ref_expressions[0]} == {ref_expressions[1]}"

    def tricc_operation_not_equal(self, ref_expressions):
        return f"{ref_expressions[0]} != {ref_expressions[1]}"

    def tricc_operation_and(self, ref_expressions):
        if len(ref_expressions) == 1:
            return ref_expressions[0]
        if len(ref_expressions) > 1:
            return " && ".join(ref_expressions)
        else:
            return "true"

    def tricc_operation_or(self, ref_expressions):
        if len(ref_expressions) == 1:
            return ref_expressions[0]
        if len(ref_expressions) > 1:
            return "(" + " || ".join(ref_expressions) + ")"
        else:
            return "true"

    def tricc_operation_not(self, ref_expressions):
        return f"!({ref_expressions[0]})"

    def tricc_operation_plus(self, ref_expressions):
        return " + ".join(ref_expressions)

    def tricc_operation_minus(self, ref_expressions):
        if len(ref_expressions) > 1:
            return " - ".join(map(str, ref_expressions))
        elif len(ref_expressions) == 1:
            return f"-{ref_expressions[0]}"

    def tricc_operation_more(self, ref_expressions):
        return f"{ref_expressions[0]} > {ref_expressions[1]}"

    def tricc_operation_less(self, ref_expressions):
        return f"{ref_expressions[0]} < {ref_expressions[1]}"

    def tricc_operation_more_or_equal(self, ref_expressions):
        return f"{ref_expressions[0]} >= {ref_expressions[1]}"

    def tricc_operation_less_or_equal(self, ref_expressions):
        return f"{ref_expressions[0]} <= {ref_expressions[1]}"

    def tricc_operation_selected(self, ref_expressions):
        # For DHIS2, check if value is selected in multi-select
        return f"d2:countIfValue({ref_expressions[0]}, {ref_expressions[1]})>0"

    def tricc_operation_count(self, ref_expressions):
        return f"d2:count({ref_expressions[0]})"

    def tricc_operation_multiplied(self, ref_expressions):
        return "*".join(ref_expressions)

    def tricc_operation_divided(self, ref_expressions):
        return f"{ref_expressions[0]} / {ref_expressions[1]}"

    def tricc_operation_modulo(self, ref_expressions):
        return f"{ref_expressions[0]} % {ref_expressions[1]}"

    def tricc_operation_coalesce(self, ref_expressions):
        return f"d2:coalesce({','.join(ref_expressions)})"

    def tricc_operation_native(self, ref_expressions):
        if len(ref_expressions) > 0:
            return f"{ref_expressions[0]}({','.join(ref_expressions[1:])})"

    def tricc_operation_istrue(self, ref_expressions):
        return f"{ref_expressions[0]} == true"

    def tricc_operation_isfalse(self, ref_expressions):
        return f"{ref_expressions[0]} == false"

    def tricc_operation_parenthesis(self, ref_expressions):
        return f"({ref_expressions[0]})"

    def tricc_operation_between(self, ref_expressions):
        return f"{ref_expressions[0]} >= {ref_expressions[1]} && {ref_expressions[0]} < {ref_expressions[2]}"
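The `tricc_operation_*` helpers above render tricc operators into DHIS2 program-rule expression syntax: plain comparison operators plus `d2:` functions. Two illustrative calls, assuming `strategy` is a `DHIS2Strategy` instance and the `#{...}` variable names are hypothetical:

    strategy.tricc_operation_between(["#{age_in_months}", "6", "60"])
    # -> '#{age_in_months} >= 6 && #{age_in_months} < 60'

    strategy.tricc_operation_selected(["#{symptoms}", "'fever'"])
    # -> "d2:countIfValue(#{symptoms}, 'fever')>0"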

    def tricc_operation_isnull(self, ref_expressions):
        return f"!d2:hasValue({ref_expressions[0]})"

    def tricc_operation_isnotnull(self, ref_expressions):
        return f"d2:hasValue({ref_expressions[0]})"

    def tricc_operation_isnottrue(self, ref_expressions):
        return f"{ref_expressions[0]} != true"

    def tricc_operation_isnotfalse(self, ref_expressions):
        return f"{ref_expressions[0]} != false"

    def tricc_operation_notexist(self, ref_expressions):
        return f"!d2:hasValue({ref_expressions[0]})"

    def tricc_operation_case(self, ref_expressions):
        # Simplified case handling
        parts = []
        for i in range(0, len(ref_expressions), 2):
            if i + 1 < len(ref_expressions):
                parts.append(f"if({ref_expressions[i]}, {ref_expressions[i+1]})")
        return " || ".join(parts)

    def tricc_operation_ifs(self, ref_expressions):
        return self.tricc_operation_case(ref_expressions[1:])

    def tricc_operation_if(self, ref_expressions):
        return f"if({ref_expressions[0]}, {ref_expressions[1]}, {ref_expressions[2]})"

    def tricc_operation_contains(self, ref_expressions):
        return f"d2:contains({ref_expressions[0]}, {ref_expressions[1]})"

    def tricc_operation_exists(self, ref_expressions):
        parts = []
        for ref in ref_expressions:
            parts.append(f"d2:hasValue({ref})")
        return " && ".join(parts)

    def tricc_operation_cast_number(self, ref_expressions):
        return f"d2:toNumber({ref_expressions[0]})"

    def tricc_operation_cast_integer(self, ref_expressions):
        return f"d2:toNumber({ref_expressions[0]})"

    def tricc_operation_zscore(self, ref_expressions):
        # Placeholder - would need specific implementation
        return f"zscore({','.join(ref_expressions)})"

    def tricc_operation_datetime_to_decimal(self, ref_expressions):
        return f"d2:daysBetween({ref_expressions[0]}, '1970-01-01')"

    def tricc_operation_round(self, ref_expressions):
        return f"d2:round({ref_expressions[0]})"

    def tricc_operation_izscore(self, ref_expressions):
        return f"izscore({','.join(ref_expressions)})"

    def tricc_operation_concatenate(self, ref_expressions):
        return f"d2:concatenate({','.join(ref_expressions)})"