bb-integrations-library 3.0.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bb_integrations_lib/__init__.py +0 -0
- bb_integrations_lib/converters/__init__.py +0 -0
- bb_integrations_lib/gravitate/__init__.py +0 -0
- bb_integrations_lib/gravitate/base_api.py +20 -0
- bb_integrations_lib/gravitate/model.py +29 -0
- bb_integrations_lib/gravitate/pe_api.py +122 -0
- bb_integrations_lib/gravitate/rita_api.py +552 -0
- bb_integrations_lib/gravitate/sd_api.py +572 -0
- bb_integrations_lib/gravitate/testing/TTE/sd/models.py +1398 -0
- bb_integrations_lib/gravitate/testing/TTE/sd/tests/test_models.py +2987 -0
- bb_integrations_lib/gravitate/testing/__init__.py +0 -0
- bb_integrations_lib/gravitate/testing/builder.py +55 -0
- bb_integrations_lib/gravitate/testing/openapi.py +70 -0
- bb_integrations_lib/gravitate/testing/util.py +274 -0
- bb_integrations_lib/mappers/__init__.py +0 -0
- bb_integrations_lib/mappers/prices/__init__.py +0 -0
- bb_integrations_lib/mappers/prices/model.py +106 -0
- bb_integrations_lib/mappers/prices/price_mapper.py +127 -0
- bb_integrations_lib/mappers/prices/protocol.py +20 -0
- bb_integrations_lib/mappers/prices/util.py +61 -0
- bb_integrations_lib/mappers/rita_mapper.py +523 -0
- bb_integrations_lib/models/__init__.py +0 -0
- bb_integrations_lib/models/dtn_supplier_invoice.py +487 -0
- bb_integrations_lib/models/enums.py +28 -0
- bb_integrations_lib/models/pipeline_structs.py +76 -0
- bb_integrations_lib/models/probe/probe_event.py +20 -0
- bb_integrations_lib/models/probe/request_data.py +431 -0
- bb_integrations_lib/models/probe/resume_token.py +7 -0
- bb_integrations_lib/models/rita/audit.py +113 -0
- bb_integrations_lib/models/rita/auth.py +30 -0
- bb_integrations_lib/models/rita/bucket.py +17 -0
- bb_integrations_lib/models/rita/config.py +188 -0
- bb_integrations_lib/models/rita/constants.py +19 -0
- bb_integrations_lib/models/rita/crossroads_entities.py +293 -0
- bb_integrations_lib/models/rita/crossroads_mapping.py +428 -0
- bb_integrations_lib/models/rita/crossroads_monitoring.py +78 -0
- bb_integrations_lib/models/rita/crossroads_network.py +41 -0
- bb_integrations_lib/models/rita/crossroads_rules.py +80 -0
- bb_integrations_lib/models/rita/email.py +39 -0
- bb_integrations_lib/models/rita/issue.py +63 -0
- bb_integrations_lib/models/rita/mapping.py +227 -0
- bb_integrations_lib/models/rita/probe.py +58 -0
- bb_integrations_lib/models/rita/reference_data.py +110 -0
- bb_integrations_lib/models/rita/source_system.py +9 -0
- bb_integrations_lib/models/rita/workers.py +76 -0
- bb_integrations_lib/models/sd/bols_and_drops.py +241 -0
- bb_integrations_lib/models/sd/get_order.py +301 -0
- bb_integrations_lib/models/sd/orders.py +18 -0
- bb_integrations_lib/models/sd_api.py +115 -0
- bb_integrations_lib/pipelines/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/order_by_site_product_parser.py +50 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/tank_configs_parser.py +47 -0
- bb_integrations_lib/pipelines/parsers/dtn/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/dtn/dtn_price_parser.py +102 -0
- bb_integrations_lib/pipelines/parsers/dtn/model.py +79 -0
- bb_integrations_lib/pipelines/parsers/price_engine/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/price_engine/parse_accessorials_prices_parser.py +67 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_merge_parser.py +111 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_sync_parser.py +107 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/shared.py +81 -0
- bb_integrations_lib/pipelines/parsers/tank_reading_parser.py +155 -0
- bb_integrations_lib/pipelines/parsers/tank_sales_parser.py +144 -0
- bb_integrations_lib/pipelines/shared/__init__.py +0 -0
- bb_integrations_lib/pipelines/shared/allocation_matching.py +227 -0
- bb_integrations_lib/pipelines/shared/bol_allocation.py +2793 -0
- bb_integrations_lib/pipelines/steps/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/create_accessorials_step.py +80 -0
- bb_integrations_lib/pipelines/steps/distribution_report/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/distribution_report/distribution_report_datafram_to_raw_data.py +33 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_model_history_step.py +50 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_order_by_site_product_step.py +62 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_tank_configs_step.py +40 -0
- bb_integrations_lib/pipelines/steps/distribution_report/join_distribution_order_dos_step.py +85 -0
- bb_integrations_lib/pipelines/steps/distribution_report/upload_distribution_report_datafram_to_big_query.py +47 -0
- bb_integrations_lib/pipelines/steps/echo_step.py +14 -0
- bb_integrations_lib/pipelines/steps/export_dataframe_to_rawdata_step.py +28 -0
- bb_integrations_lib/pipelines/steps/exporting/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/exporting/bbd_export_payroll_file_step.py +107 -0
- bb_integrations_lib/pipelines/steps/exporting/bbd_export_readings_step.py +236 -0
- bb_integrations_lib/pipelines/steps/exporting/cargas_wholesale_bundle_upload_step.py +33 -0
- bb_integrations_lib/pipelines/steps/exporting/dataframe_flat_file_export.py +29 -0
- bb_integrations_lib/pipelines/steps/exporting/gcs_bucket_export_file_step.py +34 -0
- bb_integrations_lib/pipelines/steps/exporting/keyvu_export_step.py +356 -0
- bb_integrations_lib/pipelines/steps/exporting/pe_price_export_step.py +238 -0
- bb_integrations_lib/pipelines/steps/exporting/platform_science_order_sync_step.py +500 -0
- bb_integrations_lib/pipelines/steps/exporting/save_rawdata_to_disk.py +15 -0
- bb_integrations_lib/pipelines/steps/exporting/sftp_export_file_step.py +60 -0
- bb_integrations_lib/pipelines/steps/exporting/sftp_export_many_files_step.py +23 -0
- bb_integrations_lib/pipelines/steps/exporting/update_exported_orders_table_step.py +64 -0
- bb_integrations_lib/pipelines/steps/filter_step.py +22 -0
- bb_integrations_lib/pipelines/steps/get_latest_sync_date.py +34 -0
- bb_integrations_lib/pipelines/steps/importing/bbd_import_payroll_step.py +30 -0
- bb_integrations_lib/pipelines/steps/importing/get_order_numbers_to_export_step.py +138 -0
- bb_integrations_lib/pipelines/steps/importing/load_file_to_dataframe_step.py +46 -0
- bb_integrations_lib/pipelines/steps/importing/load_imap_attachment_step.py +172 -0
- bb_integrations_lib/pipelines/steps/importing/pe_bulk_sync_price_structure_step.py +68 -0
- bb_integrations_lib/pipelines/steps/importing/pe_price_merge_step.py +86 -0
- bb_integrations_lib/pipelines/steps/importing/sftp_file_config_step.py +124 -0
- bb_integrations_lib/pipelines/steps/importing/test_exact_file_match.py +57 -0
- bb_integrations_lib/pipelines/steps/null_step.py +15 -0
- bb_integrations_lib/pipelines/steps/pe_integration_job_step.py +32 -0
- bb_integrations_lib/pipelines/steps/processing/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/processing/archive_gcs_step.py +76 -0
- bb_integrations_lib/pipelines/steps/processing/archive_sftp_step.py +48 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_format_tank_readings_step.py +492 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_prices_step.py +54 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_tank_sales_step.py +124 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_tankreading_step.py +80 -0
- bb_integrations_lib/pipelines/steps/processing/convert_bbd_order_to_cargas_step.py +226 -0
- bb_integrations_lib/pipelines/steps/processing/delete_sftp_step.py +33 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/__init__.py +2 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/convert_dtn_invoice_to_sd_model.py +145 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/parse_dtn_invoice_step.py +38 -0
- bb_integrations_lib/pipelines/steps/processing/file_config_parser_step.py +720 -0
- bb_integrations_lib/pipelines/steps/processing/file_config_parser_step_v2.py +418 -0
- bb_integrations_lib/pipelines/steps/processing/get_sd_price_price_request.py +105 -0
- bb_integrations_lib/pipelines/steps/processing/keyvu_upload_deliveryplan_step.py +39 -0
- bb_integrations_lib/pipelines/steps/processing/mark_orders_exported_in_bbd_step.py +185 -0
- bb_integrations_lib/pipelines/steps/processing/pe_price_rows_processing_step.py +174 -0
- bb_integrations_lib/pipelines/steps/processing/send_process_report_step.py +47 -0
- bb_integrations_lib/pipelines/steps/processing/sftp_renamer_step.py +61 -0
- bb_integrations_lib/pipelines/steps/processing/tank_reading_touchup_steps.py +75 -0
- bb_integrations_lib/pipelines/steps/processing/upload_supplier_invoice_step.py +16 -0
- bb_integrations_lib/pipelines/steps/send_attached_in_rita_email_step.py +44 -0
- bb_integrations_lib/pipelines/steps/send_rita_email_step.py +34 -0
- bb_integrations_lib/pipelines/steps/sleep_step.py +24 -0
- bb_integrations_lib/pipelines/wrappers/__init__.py +0 -0
- bb_integrations_lib/pipelines/wrappers/accessorials_transformation.py +104 -0
- bb_integrations_lib/pipelines/wrappers/distribution_report.py +191 -0
- bb_integrations_lib/pipelines/wrappers/export_tank_readings.py +237 -0
- bb_integrations_lib/pipelines/wrappers/import_tank_readings.py +192 -0
- bb_integrations_lib/pipelines/wrappers/wrapper.py +81 -0
- bb_integrations_lib/protocols/__init__.py +0 -0
- bb_integrations_lib/protocols/flat_file.py +210 -0
- bb_integrations_lib/protocols/gravitate_client.py +104 -0
- bb_integrations_lib/protocols/pipelines.py +697 -0
- bb_integrations_lib/provider/__init__.py +0 -0
- bb_integrations_lib/provider/api/__init__.py +0 -0
- bb_integrations_lib/provider/api/cargas/__init__.py +0 -0
- bb_integrations_lib/provider/api/cargas/client.py +43 -0
- bb_integrations_lib/provider/api/cargas/model.py +49 -0
- bb_integrations_lib/provider/api/cargas/protocol.py +23 -0
- bb_integrations_lib/provider/api/dtn/__init__.py +0 -0
- bb_integrations_lib/provider/api/dtn/client.py +128 -0
- bb_integrations_lib/provider/api/dtn/protocol.py +9 -0
- bb_integrations_lib/provider/api/keyvu/__init__.py +0 -0
- bb_integrations_lib/provider/api/keyvu/client.py +30 -0
- bb_integrations_lib/provider/api/keyvu/model.py +149 -0
- bb_integrations_lib/provider/api/macropoint/__init__.py +0 -0
- bb_integrations_lib/provider/api/macropoint/client.py +28 -0
- bb_integrations_lib/provider/api/macropoint/model.py +40 -0
- bb_integrations_lib/provider/api/pc_miler/__init__.py +0 -0
- bb_integrations_lib/provider/api/pc_miler/client.py +130 -0
- bb_integrations_lib/provider/api/pc_miler/model.py +6 -0
- bb_integrations_lib/provider/api/pc_miler/web_services_apis.py +131 -0
- bb_integrations_lib/provider/api/platform_science/__init__.py +0 -0
- bb_integrations_lib/provider/api/platform_science/client.py +147 -0
- bb_integrations_lib/provider/api/platform_science/model.py +82 -0
- bb_integrations_lib/provider/api/quicktrip/__init__.py +0 -0
- bb_integrations_lib/provider/api/quicktrip/client.py +52 -0
- bb_integrations_lib/provider/api/telapoint/__init__.py +0 -0
- bb_integrations_lib/provider/api/telapoint/client.py +68 -0
- bb_integrations_lib/provider/api/telapoint/model.py +178 -0
- bb_integrations_lib/provider/api/warren_rogers/__init__.py +0 -0
- bb_integrations_lib/provider/api/warren_rogers/client.py +207 -0
- bb_integrations_lib/provider/aws/__init__.py +0 -0
- bb_integrations_lib/provider/aws/s3/__init__.py +0 -0
- bb_integrations_lib/provider/aws/s3/client.py +126 -0
- bb_integrations_lib/provider/ftp/__init__.py +0 -0
- bb_integrations_lib/provider/ftp/client.py +140 -0
- bb_integrations_lib/provider/ftp/interface.py +273 -0
- bb_integrations_lib/provider/ftp/model.py +76 -0
- bb_integrations_lib/provider/imap/__init__.py +0 -0
- bb_integrations_lib/provider/imap/client.py +228 -0
- bb_integrations_lib/provider/imap/model.py +3 -0
- bb_integrations_lib/provider/sqlserver/__init__.py +0 -0
- bb_integrations_lib/provider/sqlserver/client.py +106 -0
- bb_integrations_lib/secrets/__init__.py +4 -0
- bb_integrations_lib/secrets/adapters.py +98 -0
- bb_integrations_lib/secrets/credential_models.py +222 -0
- bb_integrations_lib/secrets/factory.py +85 -0
- bb_integrations_lib/secrets/providers.py +160 -0
- bb_integrations_lib/shared/__init__.py +0 -0
- bb_integrations_lib/shared/exceptions.py +25 -0
- bb_integrations_lib/shared/model.py +1039 -0
- bb_integrations_lib/shared/shared_enums.py +510 -0
- bb_integrations_lib/storage/README.md +236 -0
- bb_integrations_lib/storage/__init__.py +0 -0
- bb_integrations_lib/storage/aws/__init__.py +0 -0
- bb_integrations_lib/storage/aws/s3.py +8 -0
- bb_integrations_lib/storage/defaults.py +72 -0
- bb_integrations_lib/storage/gcs/__init__.py +0 -0
- bb_integrations_lib/storage/gcs/client.py +8 -0
- bb_integrations_lib/storage/gcsmanager/__init__.py +0 -0
- bb_integrations_lib/storage/gcsmanager/client.py +8 -0
- bb_integrations_lib/storage/setup.py +29 -0
- bb_integrations_lib/util/__init__.py +0 -0
- bb_integrations_lib/util/cache/__init__.py +0 -0
- bb_integrations_lib/util/cache/custom_ttl_cache.py +75 -0
- bb_integrations_lib/util/cache/protocol.py +9 -0
- bb_integrations_lib/util/config/__init__.py +0 -0
- bb_integrations_lib/util/config/manager.py +391 -0
- bb_integrations_lib/util/config/model.py +41 -0
- bb_integrations_lib/util/exception_logger/__init__.py +0 -0
- bb_integrations_lib/util/exception_logger/exception_logger.py +146 -0
- bb_integrations_lib/util/exception_logger/test.py +114 -0
- bb_integrations_lib/util/utils.py +364 -0
- bb_integrations_lib/workers/__init__.py +0 -0
- bb_integrations_lib/workers/groups.py +13 -0
- bb_integrations_lib/workers/rpc_worker.py +50 -0
- bb_integrations_lib/workers/topics.py +20 -0
- bb_integrations_library-3.0.11.dist-info/METADATA +59 -0
- bb_integrations_library-3.0.11.dist-info/RECORD +217 -0
- bb_integrations_library-3.0.11.dist-info/WHEEL +4 -0
|
File without changes
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
import time
|
|
2
|
+
|
|
3
|
+
from loguru import logger
|
|
4
|
+
|
|
5
|
+
from bb_integrations_lib.gravitate.testing.util import generate_model_validation_tests, \
|
|
6
|
+
generate_pydantic_models_from_open_api
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class TestBuilder:
    """Generates Pydantic models and validation tests for a client/system pair.

    The builder derives file paths and the OpenAPI schema URL from the
    ``client`` and ``system`` identifiers, then drives the generation helpers.
    """

    def __init__(self,
                 client: str,
                 system: str):
        self.client = client
        self.system = system

    @property
    def models_file_path(self) -> str:
        # Relative path where the generated Pydantic models are written.
        return f"./{self.client}/{self.system}/models.py"

    @property
    def tests_file_path(self) -> str:
        # Relative path where the generated pytest module is written.
        return f"./{self.client}/{self.system}/tests/test_models.py"

    @property
    def open_api_url(self) -> str:
        """OpenAPI schema URL for the configured system.

        Raises:
            KeyError: if ``self.system`` is not one of the known systems.
        """
        urls = {
            # Fixed: the scheme previously read "https:/" (missing slash),
            # producing an invalid URL for the "sd" system.
            "sd": f"https://{self.client}.bb.gravitate.energy/api/openapi.json/internal",
            "rita": "https://rita.gravitate.energy/api/openapi.json"
        }
        return urls[self.system]

    def build_tests(self) -> None:
        """Generate models from the OpenAPI schema, then generate their tests.

        Raises:
            Exception: re-raises any failure after logging it.
        """
        try:
            generate_pydantic_models_from_open_api(
                open_api_url=self.open_api_url,
                save_file_to_path=self.models_file_path
            )
            # Brief pause before the generated models file is read back.
            time.sleep(1)
            generate_model_validation_tests(
                models_file_path=self.models_file_path,
                tests_file_path=self.tests_file_path
            )
        except Exception as e:
            logger.error(f"Failed to build tests: {e}")
            raise

    @classmethod
    def for_client_and_system(cls, client: str, system: str):
        """Construct a builder and immediately run the full generation pass."""
        instance = cls(client=client, system=system)
        instance.build_tests()
        return instance
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
if __name__ == "__main__":
    # Manual entry point: regenerate the TTE supply-and-dispatch models/tests.
    built_builder = TestBuilder.for_client_and_system("tte", "sd")
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
import json
|
|
2
|
+
|
|
3
|
+
import httpx
|
|
4
|
+
from functools import lru_cache
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
@lru_cache(maxsize=10)
def get_openapi(url: str) -> dict:
    """Fetch an OpenAPI schema from *url* and return it as a dict.

    Results are cached per URL (up to 10 entries) so repeated schema pulls
    within one process hit the network only once.
    """
    resp = httpx.get(url)
    resp.raise_for_status()
    return resp.json()
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def get_updated_supply_and_dispatch_openapi_json(url: str, schemas_to_include: list[str]):
    """Download the OpenAPI schema, filter it, and return it as a JSON string.

    Paths are filtered down to operations tagged with any of
    *schemas_to_include*, then unreferenced component schemas are dropped.
    """
    raw_schema = get_openapi(url)
    tag_filtered = filter_based_on_tag(raw_schema, schemas_to_include)
    return json.dumps(filter_schemas_by_references(tag_filtered))
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def filter_based_on_tag(data: dict, criteria: list[str]) -> dict:
    """Return a shallow copy of *data* keeping only operations tagged with *criteria*.

    A path survives only if at least one of its operations carries at least
    one tag from *criteria*; non-dict entries under a path (e.g. a shared
    ``parameters`` list) are never matched.
    """
    wanted = set(criteria)
    result = data.copy()
    surviving_paths = {}

    for path, methods in data.get("paths", {}).items():
        kept_ops = {
            method: op
            for method, op in methods.items()
            if isinstance(op, dict) and wanted.intersection(op.get("tags", []))
        }
        if kept_ops:
            surviving_paths[path] = kept_ops

    result["paths"] = surviving_paths
    return result
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def filter_schemas_by_references(data: dict) -> dict:
    """Drop component schemas that no path operation references.

    Walks every operation under ``paths`` collecting ``$ref`` targets, then
    rebuilds ``components.schemas`` keeping only the referenced names.
    """
    result = data.copy()
    used_names = set()

    for _path, methods in data.get("paths", {}).items():
        for _method, operation in methods.items():
            if isinstance(operation, dict):
                _collect_schema_references(operation, used_names)

    if "components" in data and "schemas" in data["components"]:
        result["components"] = data["components"].copy()
        result["components"]["schemas"] = {
            name: schema
            for name, schema in data["components"]["schemas"].items()
            if name in used_names
        }
    return result
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def _collect_schema_references(obj, referenced_schemas):
|
|
58
|
+
if isinstance(obj, dict):
|
|
59
|
+
for key, value in obj.items():
|
|
60
|
+
if key == "$ref" and isinstance(value, str):
|
|
61
|
+
if value.startswith("#/components/schemas/"):
|
|
62
|
+
schema_name = value.split("/")[-1]
|
|
63
|
+
referenced_schemas.add(schema_name)
|
|
64
|
+
else:
|
|
65
|
+
_collect_schema_references(value, referenced_schemas)
|
|
66
|
+
elif isinstance(obj, list):
|
|
67
|
+
for item in obj:
|
|
68
|
+
_collect_schema_references(item, referenced_schemas)
|
|
69
|
+
|
|
70
|
+
|
|
@@ -0,0 +1,274 @@
|
|
|
1
|
+
import importlib.util
|
|
2
|
+
import inspect
|
|
3
|
+
import json
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import get_origin, Union, get_args, Any, Dict
|
|
6
|
+
import time
|
|
7
|
+
from datamodel_code_generator import generate, DataModelType, InputFileType
|
|
8
|
+
from loguru import logger
|
|
9
|
+
from pydantic import BaseModel
|
|
10
|
+
|
|
11
|
+
from bb_integrations_lib.gravitate.testing.openapi import get_updated_supply_and_dispatch_openapi_json
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def ensure_directory_exists(file_path: Path) -> None:
    """Create the parent directory of *file_path* if it does not already exist."""
    file_path.parent.mkdir(parents=True, exist_ok=True)
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def generate_pydantic_models_from_open_api(open_api_url: str,
                                           save_file_to_path: str,
                                           schemas_to_include: list[str] = None,
                                           open_api_json: dict = None) -> None:
    """Generate Pydantic v2 models from an OpenAPI schema.

    Args:
        open_api_url: URL to fetch the schema from (ignored if *open_api_json*
            is provided).
        save_file_to_path: Destination path for the generated models module.
        schemas_to_include: Tags to filter operations by; defaults to
            ``['V1', 'V2']``.
        open_api_json: Pre-fetched schema (dict or JSON string) to use instead
            of downloading.

    Raises:
        ValueError: if the schema is neither a dict nor a JSON string.
        json.JSONDecodeError: if a string schema cannot be parsed.
    """
    save_path = Path(save_file_to_path)
    temp_openapi_file = save_path.parent / "temp_openapi.json"
    try:
        openapi_data = open_api_json or get_updated_supply_and_dispatch_openapi_json(
            open_api_url,
            schemas_to_include or ['V1', 'V2'])
        if isinstance(openapi_data, str):
            try:
                openapi_json = json.loads(openapi_data)
                logger.info("Parsed OpenAPI JSON string to dictionary")
            except json.JSONDecodeError as e:
                logger.error(f"Failed to parse OpenAPI JSON string: {e}")
                raise
        else:
            openapi_json = openapi_data
        if not isinstance(openapi_json, dict):
            raise ValueError(f"Expected dict or JSON string, got {type(openapi_json)}")
        ensure_directory_exists(save_path)
        ensure_directory_exists(temp_openapi_file)
        # datamodel-code-generator reads from a file, so stage the schema on disk.
        with open(temp_openapi_file, 'w') as f:
            json.dump(openapi_json, f, indent=2)
        generate(
            input_=temp_openapi_file,
            output=save_path,
            output_model_type=DataModelType.PydanticV2BaseModel
        )
        logger.info(f"Successfully generated Pydantic models at {save_file_to_path}")
    except Exception as e:
        logger.error(f"Error generating pydantic models: {e}")
        raise
    finally:
        # Fixed: cleanup was previously duplicated in the success and error
        # paths; a single finally guarantees the temp schema file is removed.
        if temp_openapi_file.exists():
            temp_openapi_file.unlink()
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def generate_model_validation_tests(models_file_path: str, tests_file_path: str) -> None:
    """Generate validation tests for all Pydantic models in the models file.

    Loads the models module, builds example input for each model, and writes
    a pytest module exercising them. Models whose example generation fails
    get an empty example dict and are excluded by the test-file writer.
    """
    try:
        if not Path(models_file_path).exists():
            logger.error(f"Models file does not exist: {models_file_path}")
            return
        discovered = load_pydantic_models_from_file(models_file_path)
        if not discovered:
            logger.warning(f"No Pydantic models found in {models_file_path}")
            return
        examples = {}
        for name, model_cls in discovered.items():
            try:
                examples[name] = generate_example_input(model_cls)
            except Exception as e:
                logger.warning(f"Could not generate example for {name}: {e}")
                examples[name] = {}
        create_test_file(tests_file_path, models_file_path, examples)

        logger.info(f"Generated validation tests for {len(discovered)} models in {tests_file_path}")

    except Exception as e:
        logger.error(f"Error generating model validation tests: {e}")
        raise
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def load_pydantic_models_from_file(file_path: str) -> Dict[str, BaseModel]:
    """Import *file_path* as a module and return its Pydantic model classes.

    Returns a mapping of attribute name -> model class; the BaseModel class
    itself is excluded. On any load failure the (possibly empty) mapping
    collected so far is returned.
    """
    found: Dict[str, BaseModel] = {}
    try:
        spec = importlib.util.spec_from_file_location("models", file_path)
        if spec is None or spec.loader is None:
            logger.error(f"Could not load module spec from {file_path}")
            return found

        loaded_module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(loaded_module)
        for attr_name in dir(loaded_module):
            try:
                candidate = getattr(loaded_module, attr_name)
                is_model_class = (
                    inspect.isclass(candidate)
                    and issubclass(candidate, BaseModel)
                    and candidate is not BaseModel
                )
                if is_model_class:
                    found[attr_name] = candidate
            except Exception as e:
                logger.debug(f"Skipping {attr_name}: {e}")
                continue

    except Exception as e:
        logger.error(f"Error loading models from {file_path}: {e}")

    return found
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
def generate_example_input(model_class: BaseModel) -> Dict[str, Any]:
    """Build a dict of example values keyed by the model's field names.

    Supports both Pydantic v2 (``model_fields``) and v1 (``__fields__``)
    metadata; a field whose example cannot be generated falls back to
    ``"example_<field>"``.
    """
    example: Dict[str, Any] = {}
    try:
        if hasattr(model_class, 'model_fields'):
            # Pydantic v2: annotation lives on FieldInfo.annotation.
            for field_name, field_info in model_class.model_fields.items():
                try:
                    example[field_name] = generate_field_example(field_info.annotation, field_name)
                except Exception as e:
                    logger.debug(f"Could not generate example for field {field_name}: {e}")
                    example[field_name] = f"example_{field_name}"
        elif hasattr(model_class, '__fields__'):
            # Pydantic v1 fallback: annotation lives on ModelField.type_.
            for field_name, field_info in model_class.__fields__.items():
                try:
                    example[field_name] = generate_field_example(field_info.type_, field_name)
                except Exception as e:
                    logger.debug(f"Could not generate example for field {field_name}: {e}")
                    example[field_name] = f"example_{field_name}"
        else:
            logger.warning(f"Could not find fields for model {model_class.__name__}")
    except Exception as e:
        logger.warning(f"Error generating example input for {model_class.__name__}: {e}")

    return example
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
def generate_field_example(field_type: Any, field_name: str) -> Any:
    """Produce a placeholder value appropriate for *field_type*.

    Optional/Union annotations are reduced to their first non-None member
    before matching. Unrecognized types (and any error) fall back to the
    string ``"example_<field>"``.
    """
    fallback = f"example_{field_name}"
    try:
        origin = get_origin(field_type)
        type_args = get_args(field_type)
        if origin is Union:
            # Strip NoneType from Optional/Union; keep the first concrete member.
            concrete = [a for a in type_args if a is not type(None)]
            if concrete:
                field_type = concrete[0]
        if origin is list:
            if type_args:
                return [generate_field_example(type_args[0], f"{field_name}_item")]
            return ["example_item"]
        if origin is dict:
            return {"key": "value"}
        if field_type == str:
            return fallback
        if field_type == int:
            return 42
        if field_type == float:
            return 3.14
        if field_type == bool:
            return True
        if field_type == list:
            return ["example_item"]
        if field_type == dict:
            return {"key": "value"}
        return fallback
    except Exception as e:
        logger.debug(f"Error generating field example for {field_name}: {e}")
        return fallback
|
|
176
|
+
|
|
177
|
+
|
|
178
|
+
def create_test_file(tests_file_path: str, models_file_path: str, test_data: Dict[str, Dict]) -> None:
    """Create the test file with validation tests for all models.

    Args:
        tests_file_path: Destination path for the generated pytest module.
        models_file_path: Path of the models module; only its stem is used
            in the generated import, so the tests are expected to resolve
            the models module from their own import path.
        test_data: Mapping of model name -> example input dict. Models with
            an empty example dict are excluded from the generated tests.
    """
    tests_path = Path(tests_file_path)
    ensure_directory_exists(tests_path)
    models_import_path = Path(models_file_path).stem
    # Drop models for which no example input could be generated.
    valid_test_data = {k: v for k, v in test_data.items() if v}
    if not valid_test_data:
        logger.warning("No valid test data found, creating minimal test file")
        test_content = f'''"""
Auto-generated validation tests for Pydantic models.
Generated on: {time.strftime('%Y-%m-%d %H:%M:%S')}
No valid models found for testing.
"""
import pytest

def test_placeholder():
    """Placeholder test - no valid models found."""
    assert True
'''
    else:
        # Header template: imports plus a shared helper the generated tests call.
        test_content = f'''"""
Auto-generated validation tests for Pydantic models.
Generated on: {time.strftime('%Y-%m-%d %H:%M:%S')}
"""
import pytest
from typing import Dict, Any
from pydantic import BaseModel, ValidationError
from {models_import_path} import {", ".join(valid_test_data.keys())}


def generic_model_validation_test(model_to_test: BaseModel, example_input: Dict[str, Any]) -> None:
    """
    Generic test function that validates a model with example input.

    Args:
        model_to_test: The Pydantic model class to test
        example_input: Dictionary with example input data
    """
    try:
        # Test successful validation
        validated_model = model_to_test.model_validate(example_input)
        assert validated_model is not None

        # Test that we can convert back to dict
        model_dict = validated_model.model_dump()
        assert isinstance(model_dict, dict)

    except ValidationError as e:
        pytest.fail(f"Validation failed for {{model_to_test.__name__}}: {{e}}")
    except Exception as e:
        pytest.fail(f"Unexpected error for {{model_to_test.__name__}}: {{e}}")


# Individual test functions for each model
'''

    # Generate individual test functions for each model
    for model_name, example_data in valid_test_data.items():
        test_content += f'''
def test_{model_name.lower()}_validation():
    """Test validation for {model_name} model."""
    example_input = {json.dumps(example_data, indent=4)}

    generic_model_validation_test({model_name}, example_input)


def test_{model_name.lower()}_validation_with_invalid_data():
    """Test validation failure for {model_name} model with invalid data."""
    invalid_input = {{"invalid_field": "should_fail"}}

    with pytest.raises(ValidationError):
        {model_name}.model_validate(invalid_input)
'''

    # Add parametrized test for all models
    if valid_test_data:
        test_content += f'''

# Parametrized test for all models
@pytest.mark.parametrize("model_class,example_input", [
'''

        for model_name, example_data in valid_test_data.items():
            test_content += f'    ({model_name}, {json.dumps(example_data)}),\n'

        test_content += '''
])
def test_all_models_validation(model_class: BaseModel, example_input: Dict[str, Any]):
    """Parametrized test for all models."""
    generic_model_validation_test(model_class, example_input)
'''

    # Write the test file
    with open(tests_path, 'w') as f:
        f.write(test_content)

    logger.info(f"Created test file at {tests_file_path}")
|
|
File without changes
|
|
File without changes
|
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
from enum import Enum
from typing import Optional, Dict, Union, List

from mergedeep import Strategy
from pydantic import BaseModel, Field

from bb_integrations_lib.gravitate.model import System
from bb_integrations_lib.models.rita.mapping import Map
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class Action(str, Enum):
    """String-valued action identifiers: start, stop, or error.

    NOTE(review): how consumers interpret these (e.g. worker lifecycle
    events) is not visible in this chunk — confirm against callers.
    """
    start = "start"
    stop = "stop"
    error = "error"
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class IntegrationType(str, Enum):
    """Denotes available integration types for a Mapper"""
    # str-valued members so the enum serializes directly in Pydantic models.
    sql = "sql"
    """Strictly limited to an MS SQL server database"""
    rita = "rita"
    """Strictly limited to rita mappings"""
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class IntegrationMappingConfig(BaseModel):
    """Describes where and how to pull entity mappings for one integration."""

    # BUGFIX: the annotation was `IntegrationType | System = None`, which does
    # not admit None even though None is the default — explicitly passing
    # type=None raised a ValidationError. Wrap in Optional to match the default.
    type: Optional[Union[IntegrationType, System]] = None
    """The integration type to pull mappings from"""
    external_id_field: Optional[str] = None
    """External: to Gravitate. Denotes the id field to be used"""
    gravitate_id_field: Optional[str] = None
    """The Gravitate id field"""
    gravitate_name_field: Optional[str] = None
    external_name_field: Optional[str] = None
    query: Optional[str] = None
    """An optional query string"""
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
class EntityConfig(BaseModel):
    """Per-entity mapping configuration (e.g. for products or locations)."""

    mapping_enabled: Optional[bool] = True
    # BUGFIX: defaults were plain dicts (`= {}`); pydantic v2 does not validate
    # defaults, so an unset field stayed a dict and attribute access such as
    # `.query` (used by PriceMapper.__init__) raised AttributeError. Use real
    # model instances — pydantic deep-copies model defaults per instance.
    mapping_integration: Optional[IntegrationMappingConfig] = IntegrationMappingConfig()
    external_system_integration: Optional[IntegrationMappingConfig] = IntegrationMappingConfig()
|
|
42
|
+
|
|
43
|
+
class PricePublisher(BaseModel):
    """A price publisher from which prices can be pulled."""

    id: Optional[str] = None
    name: str
    price_type: str
    extend_by_days: Optional[int] = None
|
|
48
|
+
|
|
49
|
+
class PricingStrategy(str, Enum):
    """Selects how many price changes per instrument are pulled."""

    use_latest = "Use Latest"
    """Only includes latest price -> may miss intraday changes"""
    use_prior_to_latest = "Use Prior to Latest"
    """Includes both the latest and latest minus one"""
    use_historic = "Use Historic"
    """Includes up to 10 historic changes for instrument since previous workday"""

    @property
    def strategy_includes(self) -> int:
        """Number of price changes this strategy includes."""
        includes = {
            PricingStrategy.use_latest: 1,
            PricingStrategy.use_prior_to_latest: 2,
            PricingStrategy.use_historic: 10,
        }
        return includes[self]
|
|
65
|
+
|
|
66
|
+
class PricingIntegrationConfig(BaseModel):
    """Top-level configuration for a customer's pricing integration."""

    environment: str
    """The customer environment; i.e. TTE"""
    price_publishers: List[PricePublisher]
    """The list of price publisher from which to pull prices"""
    entity_config: Dict[str, EntityConfig] = {}
    """A key: EntityConfig pair, describing an entity config. i.e. {'products': EntityConfig}"""
    price_mapper_ttl: Optional[int] = 3600
    """The ttl cache release for the mapper"""
    price_mapper_debug_mode: Optional[bool] = False
    """Debug mode to enable verbose logging"""
    source_system: Optional[str] = None
    """The source system from where prices originate"""
    source_system_id: Optional[str] = None
    """The source system id from where prices originate"""
    use_contract_id: bool = True
    strategy: Optional[PricingStrategy] = PricingStrategy.use_historic
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
class Group(BaseModel):
    """A group of Gravitate ids mapped from a single external id."""

    name: Optional[str] = None
    # Consistency: the file uses typing's capitalized generics everywhere else
    # (List/Dict/Union/Optional); `list[str]` was the lone lowercase generic.
    ids: List[str]
    # Count of ids; callers may increment this as rows merge into the group.
    length: int
    extra_data: Optional[Union[Dict, Map]] = None
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
class Groups(BaseModel):
    """Optional grouped-mapping collections, one dict per entity kind."""

    product_groups: Optional[Dict[str, Group]] = None
    location_groups: Optional[Dict[str, Group]] = None
    supplier_groups: Optional[Dict[str, Group]] = None
    price_publisher_groups: Optional[Dict[str, Group]] = None
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
class PriceMappings(BaseModel):
    """Container bundling the four mapping sets needed for prices."""

    product_mappings: Union[Dict, List]
    location_mappings: Union[Dict, List]
    supplier_mappings: Union[Dict, List]
    price_publishers: Union[Dict, List]
|
|
105
|
+
|
|
106
|
+
|
|
@@ -0,0 +1,127 @@
|
|
|
1
|
+
from typing import Optional, Union, List, Dict
|
|
2
|
+
from collections.abc import Iterable
|
|
3
|
+
from loguru import logger
|
|
4
|
+
|
|
5
|
+
from bb_integrations_lib.gravitate.rita_api import RitaBackendAPI
|
|
6
|
+
from bb_integrations_lib.mappers.prices.model import PriceMappings, Group, PricingIntegrationConfig,\
|
|
7
|
+
IntegrationMappingConfig
|
|
8
|
+
from bb_integrations_lib.mappers.prices.util import PricingIntegrationGetter, PricingIntegrationGetterProtocol
|
|
9
|
+
from bb_integrations_lib.models.rita.mapping import Map
|
|
10
|
+
from bb_integrations_lib.provider.sqlserver.client import SQLServerClient
|
|
11
|
+
from bb_integrations_lib.util.cache.custom_ttl_cache import CustomTTLCache
|
|
12
|
+
from bb_integrations_lib.util.cache.protocol import CustomTTLCacheProtocol
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class PriceMapper:
    """Mapper instance to get all mapping data needed for prices.

    Limited to:
    - Product, Location, Supplier and Publisher mappings

    Each entity's mappings are fetched through the integration client
    resolved for that entity and memoized in a TTL cache, so repeated
    lookups within ``ttl`` seconds do not re-query the backend.
    """

    def __init__(
        self,
        config: PricingIntegrationConfig,
        ttl: int = 3600,
        debug_mode: bool = False,
        source_system: Optional[str] = None
    ):
        """
        Args:
            config: Pricing integration config. ``config.entity_config`` must
                contain the keys "products", "locations", "suppliers" and
                "publishers" — a missing key raises AttributeError below.
            ttl: Cache time-to-live, in seconds, for mapping lookups.
            debug_mode: Enables verbose cache logging.
            source_system: Source system forwarded to RITA mapping queries.
        """
        self.ttl = ttl
        self.config = config
        # NOTE(review): `.get(...)` returns None for a missing entity key, so the
        # chained `.mapping_integration` raises AttributeError — confirm callers
        # always supply all four entity configs.
        self.products_config: IntegrationMappingConfig = self.config.entity_config.get("products").mapping_integration
        self.locations_config: IntegrationMappingConfig = self.config.entity_config.get("locations").mapping_integration
        self.suppliers_config: IntegrationMappingConfig = self.config.entity_config.get("suppliers").mapping_integration
        self.publishers_config: IntegrationMappingConfig = self.config.entity_config.get("publishers").mapping_integration
        self.products_sql_query = self.products_config.query
        self.locations_sql_query = self.locations_config.query
        self.suppliers_sql_query = self.suppliers_config.query
        self.cache: CustomTTLCacheProtocol = CustomTTLCache(verbose=debug_mode)
        self.debug_mode = debug_mode
        self.source_system = source_system

        if debug_mode:
            logger.debug("DEBUG MODE ON")
        self.integration_getter: PricingIntegrationGetterProtocol = PricingIntegrationGetter(self.config)

    @property
    def ttl_cache(self):
        """The underlying TTL cache instance."""
        return self.cache

    async def _get_cached_data(self, cache_key: str, query_func) -> Iterable:
        """Generic method to retrieve cached data or fetch it using the provided query function.

        ``query_func`` is an async no-argument callable; a falsy result is
        normalized to an empty dict before caching.
        """

        @self.cache.ttl_cache(seconds=self.ttl, cache_key=cache_key)
        async def _get_data() -> Iterable:
            return await query_func() or {}
        return await _get_data()

    async def get_entity_mappings(self, key: str, mapping_type: Optional[str] = None, query: Optional[str] = None) -> Iterable:
        """Fetch (cached) mappings for one entity.

        Args:
            key: Entity key; also used as the cache key.
            mapping_type: RITA mapping type (used only when the resolved
                client is a RitaBackendAPI).
            query: SQL query string (used only when the resolved client is a
                SQLServerClient).

        Returns:
            The mappings from the resolved client, or {} for any other client.
        """
        client = self.integration_getter.get_integration_mapping_client_by_entity(key)

        async def query_func():
            if isinstance(client, SQLServerClient):
                return client.get_mappings(query)
            elif isinstance(client, RitaBackendAPI):
                return await client.get_mappings(source_system=self.source_system,
                                                 mapping_type=mapping_type)
            return {}
        return await self._get_cached_data(key, query_func)

    async def get_mappings(
        self,
        product_key: Optional[str] = "products",
        location_key: Optional[str] = "locations",
        supplier_key: Optional[str] = "suppliers",
        price_publisher_key: Optional[str] = "publishers"
    ) -> PriceMappings:
        """Fetch all four mapping sets and bundle them into a PriceMappings."""
        return PriceMappings(
            product_mappings=await self.get_entity_mappings(product_key, mapping_type="product", query=self.products_sql_query),
            location_mappings=await self.get_entity_mappings(location_key, mapping_type="location", query=self.locations_sql_query),
            supplier_mappings=await self.get_entity_mappings(supplier_key, mapping_type="supplier", query=self.suppliers_sql_query),
            price_publishers=await self.get_entity_mappings(price_publisher_key, mapping_type="other")
        )

    @classmethod
    def group_rows(
        cls,
        rows: Union[List[Map], List[Dict]],
        external_id_field: str,
        gravitate_id_field: str,
        name_field: Optional[str] = None,
        is_rita: Optional[bool] = False,
    ) -> Dict[str, Group]:
        """Group mapping rows by external id.

        Args:
            rows: RITA ``Map`` objects (``is_rita=True``) or plain dict rows.
            external_id_field: Dict key holding the external id (dict rows only).
            gravitate_id_field: Dict key holding the Gravitate id (dict rows only).
            name_field: Optional dict key holding a display name (dict rows only).
            is_rita: True when ``rows`` are RITA ``Map`` objects.

        Returns:
            Mapping of external id -> Group.
        """
        grouped = {}
        if is_rita:
            rows: List[Map]
            for row in rows:
                row_id = row.source_id
                children = [r.gravitate_id for r in row.children]
                # Duplicate source_ids are skipped: a Map already aggregates
                # its children, so there is nothing to merge.
                if row_id not in grouped:
                    grouped[row_id] = Group(
                        name=row.gravitate_name,
                        ids=children,
                        length=len(children),
                        extra_data=row
                    )
            return grouped
        else:
            rows: List[Dict]
            for row in rows:
                row_id = row[external_id_field]
                row_guid = row[gravitate_id_field]
                row_name = row.get(name_field) if name_field else None
                # BUGFIX: keys are normalized to str, so the membership check
                # and the merge lookup must use the same normalized key. The
                # original tested the raw id but stored str(id): non-string
                # ids were never merged, and the merge branch raised KeyError.
                key = str(row_id)
                if key not in grouped:
                    grouped[key] = Group(
                        name=row_name,
                        ids=[row_guid],
                        length=1,
                        extra_data=row
                    )
                else:
                    group = grouped[key]
                    group.ids.append(row_guid)
                    group.length += 1
            return grouped
|
|
126
|
+
|
|
127
|
+
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
from typing import Protocol, Optional, Iterable, Dict, Any, runtime_checkable
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class PriceMapperProtocol(Protocol):
    """Structural interface for objects that supply price-related mappings."""

    async def get_product_mappings(self, product_key: Optional[str]) -> Iterable:
        """Gets product mappings from integration"""
        ...

    async def get_location_mappings(self, location_key: Optional[str]) -> Iterable:
        """Gets location mappings from integration"""
        ...

    async def get_supplier_mappings(self, supplier_key: Optional[str]) -> Iterable:
        """Gets supplier mappings from integration"""
        ...
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
@runtime_checkable
class ExternalPriceMapperIntegration(Protocol):
    """Structural interface for external systems that can return mappings."""

    def get_mappings(
        self,
        query: Optional[str] = None,
        source_system: Optional[str] = None,
        mapping_type: Optional[str] = None,
        params: Optional[Dict[str, Any]] = None,
    ):
        """Gets mappings from integration"""
        ...
|