ApiLogicServer 14.2.20__py3-none-any.whl → 14.3.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ApiLogicServer-14.2.20.dist-info → ApiLogicServer-14.3.7.dist-info}/METADATA +2 -2
- {ApiLogicServer-14.2.20.dist-info → ApiLogicServer-14.3.7.dist-info}/RECORD +90 -69
- api_logic_server_cli/api_logic_server.py +5 -1
- api_logic_server_cli/api_logic_server_info.yaml +3 -3
- api_logic_server_cli/cli.py +5 -2
- api_logic_server_cli/create_from_model/__pycache__/api_logic_server_utils.cpython-312.pyc +0 -0
- api_logic_server_cli/create_from_model/__pycache__/ont_build.cpython-312.pyc +0 -0
- api_logic_server_cli/create_from_model/__pycache__/ont_create.cpython-312.pyc +0 -0
- api_logic_server_cli/create_from_model/api_logic_server_utils.py +4 -0
- api_logic_server_cli/create_from_model/ont_build.py +53 -19
- api_logic_server_cli/create_from_model/ont_create.py +14 -5
- api_logic_server_cli/fragments/declare_logic.py +72 -0
- api_logic_server_cli/{prototypes/manager/system/genai/create_db_models_inserts/logic_discovery_prefix.py → fragments/declare_logic_begin.py} +2 -1
- api_logic_server_cli/fragments/declare_logic_end.py +52 -0
- api_logic_server_cli/genai/genai.py +25 -8
- api_logic_server_cli/genai/genai_logic_builder.py +14 -11
- api_logic_server_cli/genai/genai_svcs.py +104 -7
- api_logic_server_cli/manager.py +20 -16
- api_logic_server_cli/model_migrator/model_migrator_start.py +1 -1
- api_logic_server_cli/model_migrator/reposreader.py +9 -1
- api_logic_server_cli/model_migrator/rule_obj.py +24 -6
- api_logic_server_cli/prototypes/base/api/api_discovery/ontimize_api.py +4 -1
- api_logic_server_cli/prototypes/base/api/system/expression_parser.py +10 -4
- api_logic_server_cli/prototypes/base/config/activate_logicbank.py +8 -4
- api_logic_server_cli/prototypes/base/database/bind_dbs.py +1 -1
- api_logic_server_cli/prototypes/base/database/test_data/readme.md +5 -5
- api_logic_server_cli/prototypes/base/integration/kafka/kafka_producer.py +32 -8
- api_logic_server_cli/prototypes/base/integration/system/RowDictMapper.py +33 -16
- api_logic_server_cli/prototypes/base/logic/declare_logic.py +9 -3
- api_logic_server_cli/prototypes/base/logic/load_verify_rules.py +217 -0
- api_logic_server_cli/prototypes/base/logic/logic_discovery/auto_discovery.py +22 -13
- api_logic_server_cli/prototypes/genai_demo/api/customize_api.py +9 -11
- api_logic_server_cli/prototypes/genai_demo/database/.DS_Store +0 -0
- api_logic_server_cli/prototypes/genai_demo/database/db.sqlite +0 -0
- api_logic_server_cli/prototypes/genai_demo/database/models.py +52 -42
- api_logic_server_cli/prototypes/genai_demo/integration/row_dict_maps/OrderB2B.py +4 -6
- api_logic_server_cli/prototypes/genai_demo/integration/row_dict_maps/__pycache__/OrderB2B.cpython-312.pyc +0 -0
- api_logic_server_cli/prototypes/genai_demo/integration/row_dict_maps/row_dict_maps_readme.md +3 -0
- api_logic_server_cli/prototypes/genai_demo/logic/__pycache__/declare_logic.cpython-312.pyc +0 -0
- api_logic_server_cli/prototypes/genai_demo/logic/__pycache__/load_verify_rules.cpython-312.pyc +0 -0
- api_logic_server_cli/prototypes/genai_demo/logic/declare_logic.py +58 -62
- api_logic_server_cli/prototypes/genai_demo/logic/load_verify_rules.py +216 -0
- api_logic_server_cli/prototypes/genai_demo/logic/logic_discovery/__pycache__/__init__.cpython-312.pyc +0 -0
- api_logic_server_cli/prototypes/genai_demo/logic/logic_discovery/__pycache__/auto_discovery.cpython-312.pyc +0 -0
- api_logic_server_cli/prototypes/genai_demo/logic/logic_discovery/__pycache__/error_testing.cpython-312.pyc +0 -0
- api_logic_server_cli/prototypes/genai_demo/logic/logic_discovery/auto_discovery.py +52 -0
- api_logic_server_cli/prototypes/genai_demo/logic/readme_declare_logic.md +172 -0
- api_logic_server_cli/prototypes/genai_demo/security/__pycache__/declare_security.cpython-312.pyc +0 -0
- api_logic_server_cli/prototypes/genai_demo/ui/admin/admin.yaml +86 -53
- api_logic_server_cli/prototypes/manager/.vscode/launch.json +1 -1
- api_logic_server_cli/prototypes/manager/README.md +19 -4
- api_logic_server_cli/prototypes/manager/system/genai/examples/genai_demo/genai_demo.prompt +4 -1
- api_logic_server_cli/prototypes/manager/system/genai/examples/genai_demo/genai_demo.response_example +34 -26
- api_logic_server_cli/prototypes/manager/system/genai/examples/genai_demo/genai_demo_informal.prompt +3 -0
- api_logic_server_cli/prototypes/manager/system/genai/examples/genai_demo/genai_demo_iteration_discount/.DS_Store +0 -0
- api_logic_server_cli/prototypes/manager/system/genai/examples/genai_demo/genai_demo_iteration_discount/000_you_are.prompt +1 -0
- api_logic_server_cli/prototypes/manager/system/genai/examples/genai_demo/genai_demo_iteration_discount/001_logic_training.prompt +314 -0
- api_logic_server_cli/prototypes/manager/system/genai/examples/genai_demo/genai_demo_iteration_discount/002_create_db_models.prompt +150 -0
- api_logic_server_cli/prototypes/manager/system/genai/examples/genai_demo/genai_demo_iteration_discount/003_create_db_models.response +134 -0
- api_logic_server_cli/prototypes/manager/system/genai/examples/genai_demo/genai_demo_iteration_discount/004_iteratio_logic.prompt +131 -0
- api_logic_server_cli/prototypes/manager/system/genai/examples/genai_demo/genai_demo_iteration_discount/005_create_db_models.response-example +141 -0
- api_logic_server_cli/prototypes/manager/system/genai/examples/genai_demo/genai_demo_iteration_discount/create_db_models.py +105 -0
- api_logic_server_cli/prototypes/manager/system/genai/examples/genai_demo/genai_demo_iteration_discount/db.dbml +70 -0
- api_logic_server_cli/prototypes/manager/system/genai/examples/genai_demo/genai_demo_iteration_discount/readme.md +6 -0
- api_logic_server_cli/prototypes/manager/system/genai/examples/genai_demo/genai_demo_iteration_discount/response.json +178 -0
- api_logic_server_cli/prototypes/manager/system/genai/examples/genai_demo/wg_dev_merge/base_genai_demo_no_logic/logic/declare_logic.py +0 -1
- api_logic_server_cli/prototypes/manager/system/genai/examples/genai_demo/wg_dev_merge/dev_demo_no_logic_fixed/logic/declare_logic.py +0 -1
- api_logic_server_cli/prototypes/manager/system/genai/examples/genai_demo/wg_dev_merge/wg_demo_no_logic_fixed/genai/examples/genai_demo/wg_dev_merge/base_genai_demo_no_logic/logic/declare_logic.py +0 -1
- api_logic_server_cli/prototypes/manager/system/genai/examples/genai_demo/wg_dev_merge/wg_demo_no_logic_fixed/genai/examples/genai_demo/wg_dev_merge/dev_demo_no_logic_fixed/logic/declare_logic.py +0 -1
- api_logic_server_cli/prototypes/manager/system/genai/examples/genai_demo/wg_dev_merge/wg_demo_no_logic_fixed/genai/examples/genai_demo/wg_dev_merge/wg_genai_demo_no_logic_fixed_from_CLI/logic/declare_logic.py +0 -1
- api_logic_server_cli/prototypes/manager/system/genai/examples/genai_demo/wg_dev_merge/wg_demo_no_logic_fixed/logic/declare_logic.py +0 -1
- api_logic_server_cli/prototypes/manager/system/genai/examples/genai_demo/wg_dev_merge/wg_demo_no_logic_fixed/system/genai/examples/genai_demo/wg_dev_merge/base_genai_demo_no_logic/logic/declare_logic.py +0 -1
- api_logic_server_cli/prototypes/manager/system/genai/examples/genai_demo/wg_dev_merge/wg_demo_no_logic_fixed/system/genai/examples/genai_demo/wg_dev_merge/dev_demo_no_logic_fixed/logic/declare_logic.py +0 -1
- api_logic_server_cli/prototypes/manager/system/genai/examples/genai_demo/wg_dev_merge/wg_demo_no_logic_fixed/system/genai/examples/genai_demo/wg_dev_merge/wg_genai_demo_no_logic_fixed_from_CLI/logic/declare_logic.py +0 -1
- api_logic_server_cli/prototypes/manager/system/genai/examples/time_tracking_billing/002_create_db_models.prompt +194 -0
- api_logic_server_cli/prototypes/manager/system/genai/examples/time_tracking_billing/003_create_db_models.response +298 -0
- api_logic_server_cli/prototypes/{genai_demo/database/chatgpt/sample_ai.sqlite → manager/system/genai/examples/time_tracking_billing/db.sqlite} +0 -0
- api_logic_server_cli/prototypes/manager/system/genai/examples/time_tracking_billing/readme.md +61 -0
- api_logic_server_cli/prototypes/manager/system/genai/learning_requests/logic_bank_api.prompt +29 -11
- api_logic_server_cli/prototypes/manager/system/genai/prompt_inserts/iteration.prompt +2 -1
- api_logic_server_cli/prototypes/nw_no_cust/venv_setup/system_note.txt +1 -1
- api_logic_server_cli/prototypes/ont_app/templates/home_tree_template.html +9 -0
- api_logic_server_cli/prototypes/ont_app/templates/tree_routing.jinja +32 -0
- api_logic_server_cli/sqlacodegen_wrapper/sqlacodegen/sqlacodegen/__pycache__/codegen.cpython-312.pyc +0 -0
- api_logic_server_cli/sqlacodegen_wrapper/sqlacodegen/sqlacodegen/codegen.py +4 -2
- api_logic_server_cli/tools/mini_skel/logic/load_verify_rules.py +1 -1
- api_logic_server_cli/model_migrator/system/custom_endpoint.py +0 -545
- api_logic_server_cli/prototypes/base/database/test_data/z_test_data_rows.py +0 -98
- api_logic_server_cli/prototypes/genai_demo/database/chatgpt/__pycache__/copilot_models.cpython-312.pyc +0 -0
- api_logic_server_cli/prototypes/genai_demo/database/chatgpt/__pycache__/sample_ai_models.cpython-312.pyc +0 -0
- api_logic_server_cli/prototypes/genai_demo/database/chatgpt/sample_ai.chatgpt +0 -16
- api_logic_server_cli/prototypes/genai_demo/database/chatgpt/sample_ai.sql +0 -66
- api_logic_server_cli/prototypes/genai_demo/database/chatgpt/sample_ai_items.sqlite +0 -0
- api_logic_server_cli/prototypes/genai_demo/database/chatgpt/sample_ai_models.py +0 -156
- api_logic_server_cli/prototypes/genai_demo/database/chatgpt/sample_ai_models.sqlite +0 -0
- api_logic_server_cli/prototypes/genai_demo/logic/cocktail-napkin.jpg +0 -0
- {ApiLogicServer-14.2.20.dist-info → ApiLogicServer-14.3.7.dist-info}/LICENSE +0 -0
- {ApiLogicServer-14.2.20.dist-info → ApiLogicServer-14.3.7.dist-info}/WHEEL +0 -0
- {ApiLogicServer-14.2.20.dist-info → ApiLogicServer-14.3.7.dist-info}/entry_points.txt +0 -0
- {ApiLogicServer-14.2.20.dist-info → ApiLogicServer-14.3.7.dist-info}/top_level.txt +0 -0
api_logic_server_cli/manager.py
CHANGED
|
@@ -11,14 +11,14 @@ from pathlib import Path
|
|
|
11
11
|
import api_logic_server_cli.api_logic_server as PR
|
|
12
12
|
|
|
13
13
|
def create_manager(clean: bool, open_with: str, api_logic_server_path: Path,
|
|
14
|
-
volume: str = "", open_manager: bool = True):
|
|
14
|
+
volume: str = "", open_manager: bool = True, samples: bool = True):
|
|
15
15
|
"""Implements als start to create manager - called from api_logic_server_cli/cli.py
|
|
16
16
|
|
|
17
17
|
create Manager at os.getcwd(), including:
|
|
18
18
|
|
|
19
19
|
1. .vscode, readme
|
|
20
20
|
2. System folder (GenAI sample prompts / responses, others TBD)
|
|
21
|
-
3. pre-created samples
|
|
21
|
+
3. pre-created samples (optional)
|
|
22
22
|
|
|
23
23
|
Example, from CLI in directory containing a `venv` (see https://apilogicserver.github.io/Docs/Manager/):
|
|
24
24
|
als start
|
|
@@ -122,21 +122,25 @@ def create_manager(clean: bool, open_with: str, api_logic_server_path: Path,
|
|
|
122
122
|
except: # do NOT fail
|
|
123
123
|
pass # just fall back to using the pip-installed version
|
|
124
124
|
|
|
125
|
-
if
|
|
126
|
-
|
|
125
|
+
if not samples:
|
|
126
|
+
shutil.rmtree(to_dir.joinpath(f'{docker_volume}system/app_model_editor'))
|
|
127
|
+
shutil.rmtree(to_dir.joinpath(f'{docker_volume}system/genai/examples/genai_demo/wg_dev_merge'))
|
|
127
128
|
else:
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
129
|
+
if project.is_docker:
|
|
130
|
+
log.debug(f" tutorial not created for docker\n\n")
|
|
131
|
+
else:
|
|
132
|
+
tutorial_project = PR.ProjectRun(command="tutorial",
|
|
133
|
+
project_name='./samples',
|
|
134
|
+
db_url="",
|
|
135
|
+
execute=False,
|
|
136
|
+
open_with="NO_AUTO_OPEN"
|
|
137
|
+
)
|
|
138
|
+
tutorial_project = tutorial_project.tutorial(msg="Creating:") ##, create='tutorial')
|
|
139
|
+
|
|
140
|
+
samples_project = PR.ProjectRun(command= "create", project_name=f'{docker_volume}samples/nw_sample', db_url='nw+', open_with="NO_AUTO_OPEN")
|
|
141
|
+
log.setLevel(mgr_save_level)
|
|
142
|
+
log.disabled = False # todo why was it reset?
|
|
143
|
+
samples_project = PR.ProjectRun(command= "create", project_name=f'{docker_volume}samples/nw_sample_nocust', db_url='nw', open_with="NO_AUTO_OPEN")
|
|
140
144
|
log.info('')
|
|
141
145
|
log.setLevel(mgr_save_level)
|
|
142
146
|
log.disabled = False
|
|
@@ -56,7 +56,7 @@ class ModelMigrator(object):
|
|
|
56
56
|
# need repos location and project api name (teamspaces/api/{lac_project_name})
|
|
57
57
|
running_at = Path(__file__)
|
|
58
58
|
repos_location = f"{running_at.parent}{os.sep}CALiveAPICreator.repository"
|
|
59
|
-
lac_project_name = "
|
|
59
|
+
lac_project_name = "repos" # pass as args.lac_project_name TODO
|
|
60
60
|
repo_reader.start(repos_location, self.project_directory, lac_project_name, table_to_class )
|
|
61
61
|
log.debug('.. ModelMigrator.run() - done')
|
|
62
62
|
|
|
@@ -404,7 +404,7 @@ class ModelMigrationService(object):
|
|
|
404
404
|
j = json.loads(d)
|
|
405
405
|
isActive = j["isActive"]
|
|
406
406
|
if isActive:
|
|
407
|
-
func_type = j
|
|
407
|
+
func_type = j.get("functionType","Java")
|
|
408
408
|
if func_type == "rowLevel":
|
|
409
409
|
comments = j["comments"]
|
|
410
410
|
if comments != "":
|
|
@@ -427,6 +427,14 @@ class ModelMigrationService(object):
|
|
|
427
427
|
self.add_content("'''")
|
|
428
428
|
self.add_content("")
|
|
429
429
|
lac_func.append(name)
|
|
430
|
+
elif func_type == "Java":
|
|
431
|
+
name = j["name"]
|
|
432
|
+
self.add_content("'''")
|
|
433
|
+
self.add_content(f"#Java Function: {j['methodName']}")
|
|
434
|
+
self.add_content(f"#Java Function: {j['className']}")
|
|
435
|
+
self.add_content("'''")
|
|
436
|
+
self.add_content("")
|
|
437
|
+
lac_func.append(name)
|
|
430
438
|
return lac_func
|
|
431
439
|
|
|
432
440
|
def functionList(self, thisPath: str):
|
|
@@ -102,25 +102,43 @@ class RuleObj:
|
|
|
102
102
|
title =""
|
|
103
103
|
if j.title is not None:
|
|
104
104
|
title = j.title
|
|
105
|
-
funName = "fn_" + name.split(".")[0]
|
|
105
|
+
#funName = "fn_" + name.split(".")[0]
|
|
106
|
+
entityLower = entity.lower()
|
|
107
|
+
funName = f"fn_{entityLower}_{ruleType}_{name}"
|
|
106
108
|
comments = j.comments
|
|
107
109
|
appliesTo = ""
|
|
108
110
|
if j.appliesTo is not None:
|
|
109
111
|
appliesTo = j.appliesTo
|
|
110
112
|
|
|
111
113
|
# Define a function to use in the rule
|
|
112
|
-
ruleJSObj = None if self.jsObj is None else fixup(self.jsObj)
|
|
114
|
+
ruleJSObj = None if self.jsObj is None else fixup(self.jsObj) if self.jsObj is None else ""
|
|
113
115
|
tab = "\t\t"
|
|
114
116
|
self.add_content(f"\t# RuleType: {ruleType}")
|
|
115
117
|
self.add_content(f"\t# Title: {title}")
|
|
116
118
|
self.add_content(f"\t# Name: {name}")
|
|
117
119
|
self.add_content(f"\t# Entity: {entity}")
|
|
120
|
+
|
|
121
|
+
codeType = j.get("codeType", None)
|
|
122
|
+
if codeType == "Java":
|
|
123
|
+
className = j.get("className", None)
|
|
124
|
+
methodName = j.get("methodName", None)
|
|
125
|
+
self.add_content(f"\t# CodeType: {codeType}")
|
|
126
|
+
self.add_content(f"\t# ClassName: {className}")
|
|
127
|
+
self.add_content(f"\t# MethodName: {methodName}")
|
|
128
|
+
if name == "cache":
|
|
129
|
+
funName = f"fn_{methodName}"
|
|
130
|
+
|
|
131
|
+
|
|
118
132
|
self.add_content(f"\t# Comments: {comments}")
|
|
119
133
|
self.add_content("")
|
|
134
|
+
if codeType == "Java":
|
|
135
|
+
self.add_content(f"\tdef {funName}(row: models.{entity}, old_row: models.{entity}, logic_row: LogicRow):")
|
|
136
|
+
self.add_content(f"\t\t# Call Java Code: {className}.{methodName}(row, old_row, logic_row)")
|
|
137
|
+
self.add_content("\t\tpass")
|
|
138
|
+
|
|
139
|
+
|
|
120
140
|
if ruleJSObj is not None:
|
|
121
|
-
|
|
122
|
-
funName = f"\tfn_{entityLower}_{ruleType}_{name}"
|
|
123
|
-
if len(ruleJSObj) < 80 and ruleType == "formula":
|
|
141
|
+
if len(ruleJSObj) < 80 and ruleType == "formula" and codeType == "JavaScript":
|
|
124
142
|
pass
|
|
125
143
|
else:
|
|
126
144
|
self.add_content(f"\tdef {funName}(row: models.{entity}, old_row: models.{entity}, logic_row: LogicRow):")
|
|
@@ -149,7 +167,7 @@ class RuleObj:
|
|
|
149
167
|
if ruleJSObj is not None and len(ruleJSObj) > 80:
|
|
150
168
|
self.add_content(f"{tab}calling={funName})")
|
|
151
169
|
else:
|
|
152
|
-
ruleJSObj = ruleJSObj.replace("return","lambda row: ")
|
|
170
|
+
ruleJSObj = ruleJSObj.replace("return","lambda row: ") if ruleJSObj is not None else ""
|
|
153
171
|
self.add_content(f"{tab}as_expression={ruleJSObj})")
|
|
154
172
|
case "count":
|
|
155
173
|
attr = j.attribute
|
|
@@ -231,7 +231,10 @@ def add_service(app, api, project_dir, swagger_host: str, PORT: str, method_deco
|
|
|
231
231
|
session.flush()
|
|
232
232
|
except Exception as ex:
|
|
233
233
|
session.rollback()
|
|
234
|
-
|
|
234
|
+
msg = f"{ex.message if hasattr(ex, 'message') else ex}"
|
|
235
|
+
return jsonify(
|
|
236
|
+
{"code": 1, "message": f"{msg}", "data": [], "sqlTypes": None}
|
|
237
|
+
)
|
|
235
238
|
|
|
236
239
|
return jsonify({"code":0,"message":f"{method}:True","data":result,"sqlTypes":None}) #{f"{method}":True})
|
|
237
240
|
|
|
@@ -153,18 +153,24 @@ def fixup_sort(clz, data):
|
|
|
153
153
|
continue
|
|
154
154
|
return sort
|
|
155
155
|
def fixup_data(data, sqltypes):
|
|
156
|
+
new_data = None
|
|
156
157
|
if data:
|
|
158
|
+
new_data = {}
|
|
157
159
|
for key, value in data.items():
|
|
160
|
+
new_data[key] = value
|
|
158
161
|
if sqltypes and key in sqltypes and isinstance(value, str):
|
|
159
162
|
if sqltypes[key] in [-5,2,4,5,-6]: #BIGINT, TINYINT, INT, SMALLINT, INTEGER
|
|
160
|
-
|
|
163
|
+
if new_data[key].isdigit():
|
|
164
|
+
new_data[key] = int(value)
|
|
165
|
+
else:
|
|
166
|
+
del new_data[key]
|
|
161
167
|
elif sqltypes[key] in [6]: #DECIMAL
|
|
162
|
-
|
|
168
|
+
new_data[key] = Decimal(value)
|
|
163
169
|
if sqltypes and key in sqltypes and sqltypes[key] in [91,93] and isinstance(value, int): #DATE, TIMESTAMP
|
|
164
170
|
from datetime import datetime
|
|
165
171
|
fmt = "%Y-%m-%d" if sqltypes[key] == 91 else "%Y-%m-%d %H:%M:%S"
|
|
166
|
-
|
|
167
|
-
return
|
|
172
|
+
new_data[key] = datetime.fromtimestamp(value / 1000) #.strftime(fmt)
|
|
173
|
+
return new_data
|
|
168
174
|
|
|
169
175
|
def _parseFilter(filter: dict, sqltypes: any):
|
|
170
176
|
# {filter":{"@basic_expression":{"lop":"BALANCE","op":"<=","rop":35000}}
|
|
@@ -19,23 +19,27 @@ def activate_logicbank(session, constraint_handler):
|
|
|
19
19
|
|
|
20
20
|
app_logger.info("LogicBank Activation - declare_logic.py")
|
|
21
21
|
aggregate_defaults = os.environ.get("AGGREGATE_DEFAULTS") == "True"
|
|
22
|
+
all_defaults = os.environ.get("ALL_DEFAULTS") == "True"
|
|
22
23
|
try: # hover activate for info
|
|
23
24
|
LogicBank.activate(session=session,
|
|
24
25
|
activator=declare_logic.declare_logic,
|
|
25
26
|
constraint_event=constraint_handler,
|
|
26
|
-
aggregate_defaults=aggregate_defaults
|
|
27
|
+
aggregate_defaults=aggregate_defaults,
|
|
28
|
+
all_defaults=all_defaults)
|
|
27
29
|
except LBActivateException as e:
|
|
28
30
|
app_logger.error("\nLogic Bank Activation Error -- see https://apilogicserver.github.io/Docs/WebGenAI-CLI/#recovery-options")
|
|
29
31
|
if e.invalid_rules: logic_logger.error(f"Invalid Rules: {e.invalid_rules}")
|
|
30
32
|
if e.missing_attributes: logic_logger.error(f"Missing Attrs (try als genai-utils --fixup): {e.missing_attributes}")
|
|
31
33
|
app_logger.error("\n")
|
|
32
|
-
if not os.environ.get("VERIFY_RULES") == "True":
|
|
33
|
-
# WG Rule Verification, continue if VERIFY_RULES is True
|
|
34
|
+
if not os.environ.get("VERIFY_RULES") == "True" and not os.environ.get("WG_PROJECT") == "True":
|
|
35
|
+
# WG Rule Verification, continue if VERIFY_RULES is True or inside WebGenAI
|
|
34
36
|
raise e
|
|
35
37
|
|
|
36
38
|
except Exception as e:
|
|
37
39
|
app_logger.error(f"Logic Bank Activation Error: {e}")
|
|
38
40
|
app_logger.exception(e)
|
|
39
|
-
|
|
41
|
+
if not os.environ.get("WG_PROJECT") == "True":
|
|
42
|
+
# Continue if inside WebGenAI
|
|
43
|
+
raise e
|
|
40
44
|
logic_logger.setLevel(logic_logger_level)
|
|
41
45
|
|
|
@@ -15,7 +15,7 @@ def bind_dbs(flask_app):
|
|
|
15
15
|
|
|
16
16
|
flask_app.config.update(SQLALCHEMY_BINDS = {
|
|
17
17
|
'authentication': flask_app.config['SQLALCHEMY_DATABASE_URI_AUTHENTICATION'],
|
|
18
|
-
'landing_page' : flask_app.config['SQLALCHEMY_DATABASE_URI_LANDING']
|
|
18
|
+
'landing_page' : flask_app.config['SQLALCHEMY_DATABASE_URI_LANDING']
|
|
19
19
|
}) # make multiple databases available to SQLAlchemy
|
|
20
20
|
|
|
21
21
|
return
|
|
@@ -1,10 +1,10 @@
|
|
|
1
|
-
|
|
1
|
+
ChatGPT sometimes fails to build proper test data that matches the derivation rules.
|
|
2
2
|
|
|
3
|
-
You can rebuild the test data, using Logic Bank rules for proper derivations.
|
|
3
|
+
You can rebuild the test data, using Logic Bank rules for proper derivations, to rebuild your `database/db.sqlite` (make a copy first to preserve your existing data).
|
|
4
4
|
|
|
5
|
-
Envisioned support will create a new db.sqlite, with test data that reflects derivations.
|
|
6
|
-
Review, and copy to your database/db.sqlite.
|
|
7
5
|
|
|
8
6
|
```
|
|
9
7
|
als genai-utils --rebuild-test-data
|
|
10
|
-
```
|
|
8
|
+
```
|
|
9
|
+
|
|
10
|
+
You can explore the generated `database/test_data/test_data_code.py` to control test data generation.
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
"""
|
|
2
2
|
|
|
3
|
-
Version
|
|
3
|
+
Version 2.0
|
|
4
4
|
|
|
5
5
|
Invoked at server start (api_logic_server_run.py -> config/setup.py)
|
|
6
6
|
|
|
@@ -47,8 +47,23 @@ def kafka_producer():
|
|
|
47
47
|
producer = Producer(conf)
|
|
48
48
|
logger.debug(f'\nKafka producer connected')
|
|
49
49
|
|
|
50
|
+
from sqlalchemy.inspection import inspect
|
|
50
51
|
|
|
51
|
-
def
|
|
52
|
+
def get_primary_key(logic_row: LogicRow):
|
|
53
|
+
""" Return primary key for row, if it exists
|
|
54
|
+
|
|
55
|
+
Args:
|
|
56
|
+
logic_row (LogicRow): The SQLAlchemy row object
|
|
57
|
+
|
|
58
|
+
Returns:
|
|
59
|
+
dict: A dictionary with primary key column names and their values
|
|
60
|
+
"""
|
|
61
|
+
primary_key_columns = inspect(logic_row.row).mapper.primary_key
|
|
62
|
+
primary_key = {column.name: getattr(logic_row.row, column.name) for column in primary_key_columns}
|
|
63
|
+
return primary_key
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def send_kafka_message(kafka_topic: str, kafka_key: str = None, msg: str="", json_root_name: str = "",
|
|
52
67
|
logic_row: LogicRow = None, row_dict_mapper: RowDictMapper = None, payload: dict = None):
|
|
53
68
|
""" Send Kafka message regarding logic_row, mapped by row_dict_mapper
|
|
54
69
|
|
|
@@ -80,18 +95,27 @@ def send_kafka_message(kafka_topic: str, kafka_key: str, msg: str="", json_root_
|
|
|
80
95
|
else:
|
|
81
96
|
root_name = logic_row.name
|
|
82
97
|
|
|
98
|
+
if kafka_key is None:
|
|
99
|
+
kafka_key = get_primary_key(logic_row)
|
|
100
|
+
|
|
101
|
+
log_msg = msg if msg != "" else f"Sending {root_name} to Kafka topic '{kafka_topic}'"
|
|
102
|
+
|
|
83
103
|
json_string = jsonify({f'{root_name}': row_obj_dict}).data.decode('utf-8')
|
|
84
|
-
log_msg =
|
|
104
|
+
log_msg = log_msg
|
|
85
105
|
if producer: # enabled in config/config.py?
|
|
86
106
|
try:
|
|
87
107
|
producer.produce(value=json_string, topic="order_shipping", key=kafka_key)
|
|
88
108
|
if logic_row:
|
|
89
|
-
logic_row.log(
|
|
109
|
+
logic_row.log(log_msg)
|
|
90
110
|
except KafkaException as ke:
|
|
91
111
|
logger.error("kafka_producer#send_kafka_message error: {ke}")
|
|
92
112
|
else:
|
|
93
|
-
log_msg += "
|
|
113
|
+
log_msg += " [Note: **Kafka not enabled** ]"
|
|
94
114
|
if logic_row is not None:
|
|
95
|
-
logic_row.log(f'
|
|
96
|
-
|
|
97
|
-
|
|
115
|
+
logic_row.log(f'{log_msg}')
|
|
116
|
+
logger.debug(f'\n\n{log_msg}\n{json_string}')
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
def send_row_to_kafka(row: object, old_row: object, logic_row: LogicRow, with_args: dict):
|
|
120
|
+
if logic_row.row.date_shipped is not None:
|
|
121
|
+
send_kafka_message(logic_row=logic_row, kafka_topic=with_args["topic"])
|
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
from database import models
|
|
2
2
|
from flask import request, jsonify
|
|
3
3
|
import sqlalchemy as sqlalchemy
|
|
4
|
-
from sqlalchemy import Column
|
|
4
|
+
from sqlalchemy import Column, inspect
|
|
5
5
|
from sqlalchemy.ext.declarative import declarative_base
|
|
6
6
|
from flask_sqlalchemy.model import DefaultMeta
|
|
7
7
|
from sqlalchemy.ext.hybrid import hybrid_property
|
|
@@ -276,8 +276,7 @@ class RowDictMapper():
|
|
|
276
276
|
parent_lookup_list.append(self.parent_lookups)
|
|
277
277
|
for each_parent_lookup in parent_lookup_list:
|
|
278
278
|
self._parent_lookup_from_child(child_row_dict = row_dict,
|
|
279
|
-
|
|
280
|
-
parent_class = each_parent_lookup[0],
|
|
279
|
+
parent_lookup = each_parent_lookup,
|
|
281
280
|
child_row = sql_alchemy_row,
|
|
282
281
|
session = session)
|
|
283
282
|
|
|
@@ -354,27 +353,31 @@ class RowDictMapper():
|
|
|
354
353
|
|
|
355
354
|
def _parent_lookup_from_child(self, child_row_dict: dict, child_row: object,
|
|
356
355
|
session: object,
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
""" Used from child -- parent_lookups
|
|
356
|
+
parent_lookup: tuple[DefaultMeta, list[tuple[Column, str]]]):
|
|
357
|
+
""" Used from child -- parent_lookups (e,g, B2B Product)
|
|
360
358
|
|
|
361
359
|
Args:
|
|
362
|
-
child_row_dict (dict):
|
|
363
|
-
child_row (object):
|
|
364
|
-
|
|
365
|
-
|
|
360
|
+
child_row_dict (dict): the incoming payload
|
|
361
|
+
child_row (object): row
|
|
362
|
+
parent_lookup (tuple[DefaultMeta, list[tuple[Column, str]]]): parent class, list of attrs/json keys
|
|
363
|
+
session (object): SqlAlchemy session
|
|
364
|
+
|
|
365
|
+
Example lookup_fields (genai_demo/OrderB2B.py):
|
|
366
|
+
parent_lookup = ( models.Customer, [(models.Customer.name, 'Account')] )
|
|
367
|
+
parent_class: parent_lookups[0] Customer)
|
|
368
|
+
lookup_fields: parent_lookups[1] [(models.Customer.name, 'Account')]
|
|
366
369
|
|
|
367
370
|
Raises:
|
|
368
|
-
ValueError:
|
|
369
|
-
ValueError: _description_
|
|
371
|
+
ValueError: eg, missing parent
|
|
370
372
|
"""
|
|
371
|
-
|
|
373
|
+
parent_class = parent_lookup[0]
|
|
374
|
+
lookup_fields = parent_lookup[1]
|
|
372
375
|
query = session.query(parent_class)
|
|
373
376
|
|
|
374
377
|
if parent_class.__name__ in ['Product', 'Customer']:
|
|
375
378
|
logging.debug(f'_parent_lookup_from_child {parent_class.__name__}' )
|
|
376
|
-
for each_lookup_param_field in lookup_fields:
|
|
377
|
-
attr_name = each_lookup_param_field
|
|
379
|
+
for each_lookup_param_field in lookup_fields: # e.g, (models.Customer.name, 'Account')
|
|
380
|
+
attr_name = each_lookup_param_field # <col_def> <filter-val>
|
|
378
381
|
if isinstance(each_lookup_param_field, tuple):
|
|
379
382
|
col_def = each_lookup_param_field[0]
|
|
380
383
|
attr_name = each_lookup_param_field[1]
|
|
@@ -394,6 +397,20 @@ class RowDictMapper():
|
|
|
394
397
|
raise ValueError('Lookup failed: missing parent', child_row, parent_class.__name__, str(child_row_dict))
|
|
395
398
|
|
|
396
399
|
parent_row = parent_rows[0]
|
|
397
|
-
|
|
400
|
+
|
|
401
|
+
# find parent accessor - usually parent_class.__name__, unless fk is lower case (B2bOrders)
|
|
402
|
+
mapper = inspect(child_row).mapper
|
|
403
|
+
parent_accessor = None
|
|
404
|
+
for each_attribute in mapper.attrs: # find parent accessors
|
|
405
|
+
if isinstance(each_attribute, sqlalchemy.orm.relationships.RelationshipProperty):
|
|
406
|
+
if each_attribute.argument == parent_class.__name__:
|
|
407
|
+
if parent_accessor is None:
|
|
408
|
+
parent_accessor = each_attribute.key
|
|
409
|
+
else:
|
|
410
|
+
raise ValueError(f'Parent accessor not unique: {parent_accessor}') # TODO - multiple parents
|
|
411
|
+
if parent_accessor is None:
|
|
412
|
+
raise ValueError(f'Parent accessor not found: {parent_class.__name__}')
|
|
413
|
+
|
|
414
|
+
setattr(child_row, parent_accessor, parent_row)
|
|
398
415
|
|
|
399
416
|
return
|
|
@@ -5,9 +5,10 @@ from logic_bank.extensions.rule_extensions import RuleExtension
|
|
|
5
5
|
from logic_bank.logic_bank import Rule
|
|
6
6
|
from logic_bank.logic_bank import DeclareRule
|
|
7
7
|
import database.models as models
|
|
8
|
-
from database.models import *
|
|
9
8
|
import api.system.opt_locking.opt_locking as opt_locking
|
|
10
9
|
from security.system.authorization import Grant, Security
|
|
10
|
+
from logic.load_verify_rules import load_verify_rules
|
|
11
|
+
import integration.kafka.kafka_producer as kafka_producer
|
|
11
12
|
import logging
|
|
12
13
|
|
|
13
14
|
app_logger = logging.getLogger(__name__)
|
|
@@ -22,8 +23,13 @@ def declare_logic():
|
|
|
22
23
|
Your Code Goes Here - Use code completion (Rule.) to declare rules
|
|
23
24
|
'''
|
|
24
25
|
|
|
25
|
-
|
|
26
|
-
|
|
26
|
+
if os.environ.get("WG_PROJECT"):
|
|
27
|
+
# Inside WG: Load rules from docs/expprt/export.json
|
|
28
|
+
load_verify_rules()
|
|
29
|
+
else:
|
|
30
|
+
# Outside WG: load declare_logic function
|
|
31
|
+
from logic.logic_discovery.auto_discovery import discover_logic
|
|
32
|
+
discover_logic()
|
|
27
33
|
|
|
28
34
|
def handle_all(logic_row: LogicRow): # #als: TIME / DATE STAMPING, OPTIMISTIC LOCKING
|
|
29
35
|
"""
|
|
@@ -0,0 +1,217 @@
|
|
|
1
|
+
#
|
|
2
|
+
# This code loads and verifies rules from export.json and activates them if they pass verification
|
|
3
|
+
# It is WebGenAI specific, used only when env var WG_PROJECT is set
|
|
4
|
+
#
|
|
5
|
+
import ast
|
|
6
|
+
import json
|
|
7
|
+
import logging
|
|
8
|
+
import os
|
|
9
|
+
import sys
|
|
10
|
+
import safrs
|
|
11
|
+
import subprocess
|
|
12
|
+
from importlib import import_module
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
from werkzeug.utils import secure_filename
|
|
15
|
+
from database.models import *
|
|
16
|
+
from logic_bank.logic_bank import DeclareRule, Rule, LogicBank
|
|
17
|
+
from colorama import Fore, Style, init
|
|
18
|
+
from logic_bank.logic_bank import RuleBank
|
|
19
|
+
from logic_bank.rule_bank.rule_bank_setup import find_referenced_attributes
|
|
20
|
+
import tempfile
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
app_logger = logging.getLogger(__name__)
|
|
24
|
+
declare_logic_message = "ALERT: *** No Rules Yet ***" # printed in api_logic_server.py
|
|
25
|
+
|
|
26
|
+
rule_import_template = """
|
|
27
|
+
from logic_bank.logic_bank import Rule
|
|
28
|
+
from database.models import *
|
|
29
|
+
import integration.kafka.kafka_producer as kafka_producer
|
|
30
|
+
|
|
31
|
+
def init_rule():
|
|
32
|
+
{rule_code}
|
|
33
|
+
"""
|
|
34
|
+
|
|
35
|
+
MANAGER_PATH = "/opt/webgenai/database/manager.py"
|
|
36
|
+
EXPORT_JSON_PATH = os.environ.get("EXPORT_JSON_PATH", "./docs/export/export.json")
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def set_rule_status(rule_id, status):
|
|
40
|
+
"""
|
|
41
|
+
Call the manager.py script to set the status of a rule
|
|
42
|
+
|
|
43
|
+
(if the status is "active", the manager will remove the rule error)
|
|
44
|
+
"""
|
|
45
|
+
if not Path(MANAGER_PATH).exists():
|
|
46
|
+
app_logger.info(f"No manager, can't set rule {rule_id} status {status}")
|
|
47
|
+
return
|
|
48
|
+
subprocess.run([
|
|
49
|
+
"python", MANAGER_PATH,
|
|
50
|
+
"-R", rule_id,
|
|
51
|
+
"--rule-status", status],
|
|
52
|
+
cwd="/opt/webgenai")
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def set_rule_error(rule_id, error):
    """
    Call the manager.py script to set the error of a rule
    """
    if Path(MANAGER_PATH).exists():
        cmd = ["python", MANAGER_PATH, "-R", rule_id, "--rule-error", error]
        subprocess.check_output(cmd, cwd="/opt/webgenai")
    else:
        # no manager script on this host -- nothing to update
        app_logger.warning(f"No manager, can't set rule {rule_id} error {error}")
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def check_rule_code_syntax(rule_code):
    """
    Check the syntax of the rule code.

    Returns:
        the code (possibly with doubled backslashes unescaped) when it
        parses, or None when no variant parses.
    """
    try:
        ast.parse(rule_code)
        return rule_code
    except Exception as exc:
        # BUG FIX: was `log.warning(...)` -- `log` is undefined here and raised
        # a NameError whenever the rule code had a syntax error; the module
        # logger is `app_logger`.
        app_logger.warning(f"Syntax error in rule code '{rule_code}': {exc}")

    # exported code is sometimes double-escaped; retry after unescaping
    rule_code = rule_code.replace("\\\\", "\\")
    try:
        ast.parse(rule_code)
        return rule_code
    except Exception as exc:
        app_logger.warning(f"Syntax error in rule code '{rule_code}': {exc}")
    return None
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
def get_exported_rules(rule_code_dir):
    """
    Read the exported rules from export.json and write the code to the
    rule_code_dir

    Each non-rejected rule whose code parses is wrapped in
    rule_import_template and written to <rule_code_dir>/<sanitized name>.py;
    the importable module name is stored back on the rule dict as
    rule["module_name"].

    Args:
        rule_code_dir (Path): directory that receives one .py file per rule

    Returns:
        list[dict]: the rules from export.json ([] when the file is missing
        or unreadable)
    """
    export_file = Path(EXPORT_JSON_PATH)
    if not export_file.exists():
        app_logger.info(f"{export_file.resolve()} does not exist")
        return []

    try:
        with open(export_file) as f:
            export = json.load(f)
            rules = export.get("rules", [])
    except Exception as exc:
        # unreadable / malformed export file: degrade to "no rules"
        app_logger.warning(f"Failed to load rules from {export_file}: {exc}")
        return []

    for rule in rules:
        if rule["status"] == "rejected":
            continue
        # sanitize the rule name so it is a safe filename (and module name)
        rule_file = rule_code_dir / f"{secure_filename(rule['name']).replace('.','_')}.py"
        try:
            # write current rule to rule_file
            # (we can't use eval, because logicbank uses inspect)
            rule_code_str = check_rule_code_syntax(rule["code"])
            if not rule_code_str:
                # code does not parse -- skip this rule
                continue
            with open(rule_file, "w") as temp_file:
                # indent each line so it forms the body of init_rule()
                # NOTE(review): indent shown as one space in this rendering --
                # any uniform positive indent is valid Python here
                rule_code = "\n".join([f" {code}" for code in rule_code_str.split("\n")])
                temp_file.write(rule_import_template.format(rule_code=rule_code))
                temp_file_path = temp_file.name
            # module_name used to import current rule
            module_name = Path(temp_file_path).stem
            rule["module_name"] = module_name
            app_logger.info(f"{rule['id']} rule file: {rule_file}")
        except Exception as exc:
            # best-effort: log and continue with the remaining rules
            # (such a rule keeps no "module_name" key)
            app_logger.exception(exc)
            app_logger.warning(f"Failed to write rule code to {rule_file}: {exc}")

    return rules
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def verify_rules(rule_code_dir, rule_type="accepted"):
    """
    Verify the rules from export.json and activate them if they pass verification

    Write the rule code to a temporary file and import it as a module

    Args:
        rule_code_dir (Path): directory holding the generated rule modules
        rule_type (str): only rules whose status matches are verified

    Returns:
        list[dict]: all exported rules; rules that failed verification get
        status "accepted" and an "error" entry
    """
    rules = get_exported_rules(rule_code_dir)

    for rule in rules:
        if not rule["status"] == rule_type:
            continue
        # BUG FIX: rules whose code could not be written have no "module_name"
        # (see get_exported_rules) -- rule["module_name"] raised KeyError and
        # aborted the whole loop; skip them instead, consistent with
        # load_active_rules.
        module_name = rule.get("module_name", None)
        if module_name is None:
            continue
        app_logger.info(f"\n{Fore.BLUE}Verifying rule: {module_name} - {rule['id']}{Style.RESET_ALL}")
        try:
            rule_module = import_module(module_name)
            rule_module.init_rule()  # dry-run before handing to LogicBank
            LogicBank.activate(session=safrs.DB.session, activator=rule_module.init_rule)
            if rule["status"] != "active":
                set_rule_status(rule["id"], "active")
            app_logger.info(f"\n{Fore.GREEN}Activated rule {rule['id']}{Style.RESET_ALL}")

        except Exception as exc:
            # verification failed: record the error on the rule (manager + dict)
            app_logger.exception(exc)
            set_rule_error(rule["id"], f"{type(exc).__name__}: {exc}")
            app_logger.warning(f"{Fore.RED}Failed to verify {rule_type} rule code\n{rule['code']}\n{Fore.YELLOW}{type(exc).__name__}: {exc}{Style.RESET_ALL}")
            app_logger.debug(f"{rule}")
            rule["status"] = "accepted"
            rule["error"] = f"{type(exc).__name__}: {exc}"

    return rules
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
def load_active_rules(rule_code_dir, rules=None):
    """
    Load the active rules from export.json.

    Imports each active rule's generated module and runs its init_rule();
    a failing init_rule is logged and reported to the manager, and the
    remaining rules are still processed.
    """
    if not rules:
        rules = get_exported_rules(rule_code_dir)

    for rule in rules:
        mod_name = rule.get("module_name", None)
        if rule["status"] != "active" or mod_name is None:
            continue
        app_logger.info(f"{Fore.GREEN}Loading Rule Module {mod_name} {rule['id']} {Style.RESET_ALL}")
        # note: an import error propagates to the caller (only init_rule is guarded)
        mod = import_module(mod_name)
        try:
            mod.init_rule()
        except Exception as exc:
            app_logger.exception(exc)
            app_logger.warning(f"{Fore.RED}Failed to load active rule {rule['id']} {rule['code']} {Style.RESET_ALL}")
            set_rule_error(rule["id"], f"{type(exc).__name__}: {exc}")
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
def get_project_id():
    """Return the project id: $PROJECT_ID when set and non-empty, otherwise the current directory's name."""
    return os.environ.get("PROJECT_ID") or Path(os.getcwd()).name
|
|
188
|
+
|
|
189
|
+
|
|
190
|
+
def load_verify_rules():
    """
    Entry point: load (and, when env var VERIFY_RULES == "True", verify) the
    WebGenAI exported rules from export.json.

    Side effects: creates ./logic/wg_rules, appends it to sys.path, and logs
    this run to $LOG_DIR/load_verify_rules.log (or a temp dir when LOG_DIR
    is unset).
    """
    # Add FileHandler to root_logger so this run is captured in a log file
    log_file = Path(os.getenv("LOG_DIR", tempfile.mkdtemp())) / "load_verify_rules.log"
    file_handler = logging.FileHandler(log_file)
    root_logger = logging.getLogger()
    root_logger.addHandler(file_handler)

    try:
        rule_code_dir = Path("./logic/wg_rules")  # in the project root
        rule_code_dir.mkdir(parents=True, exist_ok=True)
        sys.path.append(f"{rule_code_dir}")  # so rule modules are importable

        app_logger.info(f"Loading rules from {rule_code_dir.resolve()}")

        rules = []

        if os.environ.get("VERIFY_RULES") == "True":
            rules = verify_rules(rule_code_dir, rule_type="active")
            verify_rules(rule_code_dir, rule_type="accepted")
        else:
            try:
                load_active_rules(rule_code_dir, rules)
            except Exception as exc:
                app_logger.exception(exc)
                app_logger.warning(f"{Fore.RED}Failed to load active exported rules: {exc}{Style.RESET_ALL}")
    finally:
        # BUG FIX: the handler previously stayed attached to the root logger if
        # verify_rules raised, and was never closed (leaked file descriptor);
        # detach and close it unconditionally.
        root_logger.removeHandler(file_handler)
        file_handler.close()
|
|
217
|
+
|