bb-integrations-library 3.0.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bb_integrations_lib/__init__.py +0 -0
- bb_integrations_lib/converters/__init__.py +0 -0
- bb_integrations_lib/gravitate/__init__.py +0 -0
- bb_integrations_lib/gravitate/base_api.py +20 -0
- bb_integrations_lib/gravitate/model.py +29 -0
- bb_integrations_lib/gravitate/pe_api.py +122 -0
- bb_integrations_lib/gravitate/rita_api.py +552 -0
- bb_integrations_lib/gravitate/sd_api.py +572 -0
- bb_integrations_lib/gravitate/testing/TTE/sd/models.py +1398 -0
- bb_integrations_lib/gravitate/testing/TTE/sd/tests/test_models.py +2987 -0
- bb_integrations_lib/gravitate/testing/__init__.py +0 -0
- bb_integrations_lib/gravitate/testing/builder.py +55 -0
- bb_integrations_lib/gravitate/testing/openapi.py +70 -0
- bb_integrations_lib/gravitate/testing/util.py +274 -0
- bb_integrations_lib/mappers/__init__.py +0 -0
- bb_integrations_lib/mappers/prices/__init__.py +0 -0
- bb_integrations_lib/mappers/prices/model.py +106 -0
- bb_integrations_lib/mappers/prices/price_mapper.py +127 -0
- bb_integrations_lib/mappers/prices/protocol.py +20 -0
- bb_integrations_lib/mappers/prices/util.py +61 -0
- bb_integrations_lib/mappers/rita_mapper.py +523 -0
- bb_integrations_lib/models/__init__.py +0 -0
- bb_integrations_lib/models/dtn_supplier_invoice.py +487 -0
- bb_integrations_lib/models/enums.py +28 -0
- bb_integrations_lib/models/pipeline_structs.py +76 -0
- bb_integrations_lib/models/probe/probe_event.py +20 -0
- bb_integrations_lib/models/probe/request_data.py +431 -0
- bb_integrations_lib/models/probe/resume_token.py +7 -0
- bb_integrations_lib/models/rita/audit.py +113 -0
- bb_integrations_lib/models/rita/auth.py +30 -0
- bb_integrations_lib/models/rita/bucket.py +17 -0
- bb_integrations_lib/models/rita/config.py +188 -0
- bb_integrations_lib/models/rita/constants.py +19 -0
- bb_integrations_lib/models/rita/crossroads_entities.py +293 -0
- bb_integrations_lib/models/rita/crossroads_mapping.py +428 -0
- bb_integrations_lib/models/rita/crossroads_monitoring.py +78 -0
- bb_integrations_lib/models/rita/crossroads_network.py +41 -0
- bb_integrations_lib/models/rita/crossroads_rules.py +80 -0
- bb_integrations_lib/models/rita/email.py +39 -0
- bb_integrations_lib/models/rita/issue.py +63 -0
- bb_integrations_lib/models/rita/mapping.py +227 -0
- bb_integrations_lib/models/rita/probe.py +58 -0
- bb_integrations_lib/models/rita/reference_data.py +110 -0
- bb_integrations_lib/models/rita/source_system.py +9 -0
- bb_integrations_lib/models/rita/workers.py +76 -0
- bb_integrations_lib/models/sd/bols_and_drops.py +241 -0
- bb_integrations_lib/models/sd/get_order.py +301 -0
- bb_integrations_lib/models/sd/orders.py +18 -0
- bb_integrations_lib/models/sd_api.py +115 -0
- bb_integrations_lib/pipelines/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/order_by_site_product_parser.py +50 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/tank_configs_parser.py +47 -0
- bb_integrations_lib/pipelines/parsers/dtn/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/dtn/dtn_price_parser.py +102 -0
- bb_integrations_lib/pipelines/parsers/dtn/model.py +79 -0
- bb_integrations_lib/pipelines/parsers/price_engine/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/price_engine/parse_accessorials_prices_parser.py +67 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_merge_parser.py +111 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_sync_parser.py +107 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/shared.py +81 -0
- bb_integrations_lib/pipelines/parsers/tank_reading_parser.py +155 -0
- bb_integrations_lib/pipelines/parsers/tank_sales_parser.py +144 -0
- bb_integrations_lib/pipelines/shared/__init__.py +0 -0
- bb_integrations_lib/pipelines/shared/allocation_matching.py +227 -0
- bb_integrations_lib/pipelines/shared/bol_allocation.py +2793 -0
- bb_integrations_lib/pipelines/steps/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/create_accessorials_step.py +80 -0
- bb_integrations_lib/pipelines/steps/distribution_report/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/distribution_report/distribution_report_datafram_to_raw_data.py +33 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_model_history_step.py +50 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_order_by_site_product_step.py +62 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_tank_configs_step.py +40 -0
- bb_integrations_lib/pipelines/steps/distribution_report/join_distribution_order_dos_step.py +85 -0
- bb_integrations_lib/pipelines/steps/distribution_report/upload_distribution_report_datafram_to_big_query.py +47 -0
- bb_integrations_lib/pipelines/steps/echo_step.py +14 -0
- bb_integrations_lib/pipelines/steps/export_dataframe_to_rawdata_step.py +28 -0
- bb_integrations_lib/pipelines/steps/exporting/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/exporting/bbd_export_payroll_file_step.py +107 -0
- bb_integrations_lib/pipelines/steps/exporting/bbd_export_readings_step.py +236 -0
- bb_integrations_lib/pipelines/steps/exporting/cargas_wholesale_bundle_upload_step.py +33 -0
- bb_integrations_lib/pipelines/steps/exporting/dataframe_flat_file_export.py +29 -0
- bb_integrations_lib/pipelines/steps/exporting/gcs_bucket_export_file_step.py +34 -0
- bb_integrations_lib/pipelines/steps/exporting/keyvu_export_step.py +356 -0
- bb_integrations_lib/pipelines/steps/exporting/pe_price_export_step.py +238 -0
- bb_integrations_lib/pipelines/steps/exporting/platform_science_order_sync_step.py +500 -0
- bb_integrations_lib/pipelines/steps/exporting/save_rawdata_to_disk.py +15 -0
- bb_integrations_lib/pipelines/steps/exporting/sftp_export_file_step.py +60 -0
- bb_integrations_lib/pipelines/steps/exporting/sftp_export_many_files_step.py +23 -0
- bb_integrations_lib/pipelines/steps/exporting/update_exported_orders_table_step.py +64 -0
- bb_integrations_lib/pipelines/steps/filter_step.py +22 -0
- bb_integrations_lib/pipelines/steps/get_latest_sync_date.py +34 -0
- bb_integrations_lib/pipelines/steps/importing/bbd_import_payroll_step.py +30 -0
- bb_integrations_lib/pipelines/steps/importing/get_order_numbers_to_export_step.py +138 -0
- bb_integrations_lib/pipelines/steps/importing/load_file_to_dataframe_step.py +46 -0
- bb_integrations_lib/pipelines/steps/importing/load_imap_attachment_step.py +172 -0
- bb_integrations_lib/pipelines/steps/importing/pe_bulk_sync_price_structure_step.py +68 -0
- bb_integrations_lib/pipelines/steps/importing/pe_price_merge_step.py +86 -0
- bb_integrations_lib/pipelines/steps/importing/sftp_file_config_step.py +124 -0
- bb_integrations_lib/pipelines/steps/importing/test_exact_file_match.py +57 -0
- bb_integrations_lib/pipelines/steps/null_step.py +15 -0
- bb_integrations_lib/pipelines/steps/pe_integration_job_step.py +32 -0
- bb_integrations_lib/pipelines/steps/processing/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/processing/archive_gcs_step.py +76 -0
- bb_integrations_lib/pipelines/steps/processing/archive_sftp_step.py +48 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_format_tank_readings_step.py +492 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_prices_step.py +54 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_tank_sales_step.py +124 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_tankreading_step.py +80 -0
- bb_integrations_lib/pipelines/steps/processing/convert_bbd_order_to_cargas_step.py +226 -0
- bb_integrations_lib/pipelines/steps/processing/delete_sftp_step.py +33 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/__init__.py +2 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/convert_dtn_invoice_to_sd_model.py +145 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/parse_dtn_invoice_step.py +38 -0
- bb_integrations_lib/pipelines/steps/processing/file_config_parser_step.py +720 -0
- bb_integrations_lib/pipelines/steps/processing/file_config_parser_step_v2.py +418 -0
- bb_integrations_lib/pipelines/steps/processing/get_sd_price_price_request.py +105 -0
- bb_integrations_lib/pipelines/steps/processing/keyvu_upload_deliveryplan_step.py +39 -0
- bb_integrations_lib/pipelines/steps/processing/mark_orders_exported_in_bbd_step.py +185 -0
- bb_integrations_lib/pipelines/steps/processing/pe_price_rows_processing_step.py +174 -0
- bb_integrations_lib/pipelines/steps/processing/send_process_report_step.py +47 -0
- bb_integrations_lib/pipelines/steps/processing/sftp_renamer_step.py +61 -0
- bb_integrations_lib/pipelines/steps/processing/tank_reading_touchup_steps.py +75 -0
- bb_integrations_lib/pipelines/steps/processing/upload_supplier_invoice_step.py +16 -0
- bb_integrations_lib/pipelines/steps/send_attached_in_rita_email_step.py +44 -0
- bb_integrations_lib/pipelines/steps/send_rita_email_step.py +34 -0
- bb_integrations_lib/pipelines/steps/sleep_step.py +24 -0
- bb_integrations_lib/pipelines/wrappers/__init__.py +0 -0
- bb_integrations_lib/pipelines/wrappers/accessorials_transformation.py +104 -0
- bb_integrations_lib/pipelines/wrappers/distribution_report.py +191 -0
- bb_integrations_lib/pipelines/wrappers/export_tank_readings.py +237 -0
- bb_integrations_lib/pipelines/wrappers/import_tank_readings.py +192 -0
- bb_integrations_lib/pipelines/wrappers/wrapper.py +81 -0
- bb_integrations_lib/protocols/__init__.py +0 -0
- bb_integrations_lib/protocols/flat_file.py +210 -0
- bb_integrations_lib/protocols/gravitate_client.py +104 -0
- bb_integrations_lib/protocols/pipelines.py +697 -0
- bb_integrations_lib/provider/__init__.py +0 -0
- bb_integrations_lib/provider/api/__init__.py +0 -0
- bb_integrations_lib/provider/api/cargas/__init__.py +0 -0
- bb_integrations_lib/provider/api/cargas/client.py +43 -0
- bb_integrations_lib/provider/api/cargas/model.py +49 -0
- bb_integrations_lib/provider/api/cargas/protocol.py +23 -0
- bb_integrations_lib/provider/api/dtn/__init__.py +0 -0
- bb_integrations_lib/provider/api/dtn/client.py +128 -0
- bb_integrations_lib/provider/api/dtn/protocol.py +9 -0
- bb_integrations_lib/provider/api/keyvu/__init__.py +0 -0
- bb_integrations_lib/provider/api/keyvu/client.py +30 -0
- bb_integrations_lib/provider/api/keyvu/model.py +149 -0
- bb_integrations_lib/provider/api/macropoint/__init__.py +0 -0
- bb_integrations_lib/provider/api/macropoint/client.py +28 -0
- bb_integrations_lib/provider/api/macropoint/model.py +40 -0
- bb_integrations_lib/provider/api/pc_miler/__init__.py +0 -0
- bb_integrations_lib/provider/api/pc_miler/client.py +130 -0
- bb_integrations_lib/provider/api/pc_miler/model.py +6 -0
- bb_integrations_lib/provider/api/pc_miler/web_services_apis.py +131 -0
- bb_integrations_lib/provider/api/platform_science/__init__.py +0 -0
- bb_integrations_lib/provider/api/platform_science/client.py +147 -0
- bb_integrations_lib/provider/api/platform_science/model.py +82 -0
- bb_integrations_lib/provider/api/quicktrip/__init__.py +0 -0
- bb_integrations_lib/provider/api/quicktrip/client.py +52 -0
- bb_integrations_lib/provider/api/telapoint/__init__.py +0 -0
- bb_integrations_lib/provider/api/telapoint/client.py +68 -0
- bb_integrations_lib/provider/api/telapoint/model.py +178 -0
- bb_integrations_lib/provider/api/warren_rogers/__init__.py +0 -0
- bb_integrations_lib/provider/api/warren_rogers/client.py +207 -0
- bb_integrations_lib/provider/aws/__init__.py +0 -0
- bb_integrations_lib/provider/aws/s3/__init__.py +0 -0
- bb_integrations_lib/provider/aws/s3/client.py +126 -0
- bb_integrations_lib/provider/ftp/__init__.py +0 -0
- bb_integrations_lib/provider/ftp/client.py +140 -0
- bb_integrations_lib/provider/ftp/interface.py +273 -0
- bb_integrations_lib/provider/ftp/model.py +76 -0
- bb_integrations_lib/provider/imap/__init__.py +0 -0
- bb_integrations_lib/provider/imap/client.py +228 -0
- bb_integrations_lib/provider/imap/model.py +3 -0
- bb_integrations_lib/provider/sqlserver/__init__.py +0 -0
- bb_integrations_lib/provider/sqlserver/client.py +106 -0
- bb_integrations_lib/secrets/__init__.py +4 -0
- bb_integrations_lib/secrets/adapters.py +98 -0
- bb_integrations_lib/secrets/credential_models.py +222 -0
- bb_integrations_lib/secrets/factory.py +85 -0
- bb_integrations_lib/secrets/providers.py +160 -0
- bb_integrations_lib/shared/__init__.py +0 -0
- bb_integrations_lib/shared/exceptions.py +25 -0
- bb_integrations_lib/shared/model.py +1039 -0
- bb_integrations_lib/shared/shared_enums.py +510 -0
- bb_integrations_lib/storage/README.md +236 -0
- bb_integrations_lib/storage/__init__.py +0 -0
- bb_integrations_lib/storage/aws/__init__.py +0 -0
- bb_integrations_lib/storage/aws/s3.py +8 -0
- bb_integrations_lib/storage/defaults.py +72 -0
- bb_integrations_lib/storage/gcs/__init__.py +0 -0
- bb_integrations_lib/storage/gcs/client.py +8 -0
- bb_integrations_lib/storage/gcsmanager/__init__.py +0 -0
- bb_integrations_lib/storage/gcsmanager/client.py +8 -0
- bb_integrations_lib/storage/setup.py +29 -0
- bb_integrations_lib/util/__init__.py +0 -0
- bb_integrations_lib/util/cache/__init__.py +0 -0
- bb_integrations_lib/util/cache/custom_ttl_cache.py +75 -0
- bb_integrations_lib/util/cache/protocol.py +9 -0
- bb_integrations_lib/util/config/__init__.py +0 -0
- bb_integrations_lib/util/config/manager.py +391 -0
- bb_integrations_lib/util/config/model.py +41 -0
- bb_integrations_lib/util/exception_logger/__init__.py +0 -0
- bb_integrations_lib/util/exception_logger/exception_logger.py +146 -0
- bb_integrations_lib/util/exception_logger/test.py +114 -0
- bb_integrations_lib/util/utils.py +364 -0
- bb_integrations_lib/workers/__init__.py +0 -0
- bb_integrations_lib/workers/groups.py +13 -0
- bb_integrations_lib/workers/rpc_worker.py +50 -0
- bb_integrations_lib/workers/topics.py +20 -0
- bb_integrations_library-3.0.11.dist-info/METADATA +59 -0
- bb_integrations_library-3.0.11.dist-info/RECORD +217 -0
- bb_integrations_library-3.0.11.dist-info/WHEEL +4 -0
@@ -0,0 +1,39 @@
+import base64
+from typing import Optional
+
+from pydantic import BaseModel, model_serializer, computed_field
+
+
+class EmailAttachment(BaseModel):
+    file_name: str
+    file_data_encoded: Optional[str] = None
+
+    def __init__(self, file_name: str, file_data: Optional[bytes] = None, file_data_encoded: Optional[str] = None):
+        super().__init__(file_name=file_name)
+        self.file_name = file_name
+        self.file_data_encoded = self._b64enc(file_data) if file_data is not None else file_data_encoded
+
+    def _b64enc(self, v) -> str:
+        return base64.b64encode(v).decode("utf-8")
+
+    @computed_field
+    def file_data(self) -> bytes:
+        return base64.b64decode(self.file_data_encoded)
+
+    @file_data.setter
+    def file_data(self, new_file_data: bytes) -> None:
+        self.file_data_encoded = self._b64enc(new_file_data)
+
+    @model_serializer()
+    def serialize_model(self):
+        return {
+            "file_name": self.file_name,
+            "file_data_encoded": self.file_data_encoded
+        }
+
+
+class EmailData(BaseModel):
+    to: str | list[str]
+    html_content: str
+    subject: str
+    attachments: Optional[list[EmailAttachment]] = None
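The hunk above appears to correspond to bb_integrations_lib/models/rita/email.py (the only new file listed with +39 lines). A minimal usage sketch, assuming only the classes defined in that hunk; the file names, recipient, and content below are invented for illustration:

# Illustrative only: shows the base64 round-trip performed by EmailAttachment.
attachment = EmailAttachment(file_name="report.csv", file_data=b"site,volume\nA,100\n")
print(attachment.file_data_encoded)   # base64 string actually stored on the model
print(attachment.file_data)           # computed field decodes back to the original bytes

email = EmailData(
    to=["ops@example.com"],           # hypothetical recipient
    subject="Daily report",
    html_content="<p>See attached.</p>",
    attachments=[attachment],
)
# serialize_model() emits only file_name and file_data_encoded:
print(attachment.model_dump())        # {'file_name': 'report.csv', 'file_data_encoded': '...'}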
@@ -0,0 +1,63 @@
+from datetime import datetime
+from enum import Enum
+from typing import Optional, List, Annotated
+
+from annotated_types import MaxLen
+from pydantic import BaseModel, Field
+
+
+class IssueCategory(Enum):
+    UNKNOWN = "unknown"
+    MISC = "misc"
+    ORDER = "order"
+    PAYROLL = "payroll"
+    TANK_READING = "tank"
+    PRICE = "price"
+    REFERENCE_DATA = "reference_data"
+    CROSSROADS = "crossroads"
+    CROSSROADS_MAPPINGS = "crossroads_mappings"
+
+
+class IssueBase(BaseModel):
+    key: str = Field(description="Unique key for this issue")
+    config_id: str = Field(description="Config object this issue is reported under.")
+    # group or config id?
+    name: str = Field(description="Human readable name")
+    category: IssueCategory = Field(description="Broad category of the issue")
+    problem_short: Optional[str] = Field(default=None,
+                                         description="Short description of the problem (e.g. an exception class name)")
+    problem_long: Optional[str] = Field(default=None, description="Long description of the problem (e.g. traceback)")
+    occurrences: Annotated[
+        List[datetime], Field(default=[], description="List of most recent datetimes that this issue was reported",
+                              max_length=50)]
+
+    class Config:
+        arbitrary_types_allowed = True
+
+    @property
+    def most_recent_occurrence(self) -> Optional[datetime]:
+        return max(self.occurrences, default=None)
+
+    def get_occurrence_limit(self) -> int:
+        """Get the annotated max_length of the occurrences list."""
+        for meta in self.model_fields["occurrences"].metadata:
+            if isinstance(meta, MaxLen):
+                return meta.max_length
+        raise AttributeError("IssueBase.occurrences does not have an annotated max_length")
+
+    def trim_occurrences(self):
+        """
+        Trims the occurrences array to fit within the annotated Field.max_length.
+        The array gets sorted as a side effect.
+        """
+        self.occurrences = sorted(self.occurrences)[-self.get_occurrence_limit():]
+
+
+class UpdateIssue(BaseModel):
+    key: str = Field(description="Unique key for this issue")
+    config_id: Optional[str] = Field(default=None, description="Config object this issue is reported under.")
+    name: Optional[str] = Field(default=None, description="Human readable name")
+    category: Optional[IssueCategory] = Field(default=None, description="Broad category of the issue")
+    problem_short: Optional[str] = Field(default=None,
+                                         description="Short description of the problem (e.g. an exception class name)")
+    problem_long: Optional[str] = Field(default=None, description="Long description of the problem (e.g. traceback)")
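This hunk lines up with bb_integrations_lib/models/rita/issue.py (+63 lines). A short sketch, under that assumption, of how the occurrence cap declared via max_length=50 interacts with trim_occurrences(); the key, config_id, and problem values are made up:

from datetime import datetime, timedelta, UTC

# Hypothetical issue record for illustration.
issue = IssueBase(
    key="sftp-import-failure",
    config_id="cfg-123",
    name="SFTP import failure",
    category=IssueCategory.ORDER,
    problem_short="TimeoutError",
)
now = datetime.now(UTC)
issue.occurrences = [now - timedelta(minutes=i) for i in range(60)]  # 60 reports, cap is 50

issue.trim_occurrences()
print(issue.get_occurrence_limit())          # 50, read from the MaxLen annotation
print(len(issue.occurrences))                # 50 most recent occurrences, sorted ascending
print(issue.most_recent_occurrence == now)   # True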
@@ -0,0 +1,227 @@
+from datetime import datetime, UTC
+from enum import Enum
+from typing import Optional, Dict, Self
+import json
+import pandas as pd
+from functools import cached_property
+from bson.objectid import ObjectId
+from pydantic import BaseModel, field_validator, Field
+
+
+class MappingType(str, Enum):
+    site = "site"
+    tank = "tank"
+    counterparty = "counterparty"
+    credential = "credential"
+    product = "product"
+    terminal = "terminal"
+    driver = "driver"
+    depot = "depot"
+    trailer = "trailer"
+    tractor = "tractor"
+    other = "other"
+    composite = "composite"
+
+
+class MapType(str, Enum):
+    parent = "parent"
+    child = "child"
+
+
+class View(str, Enum):
+    grid = "grid"
+    gallery = "gallery"
+
+
+class CompositeMapKey(BaseModel):
+    """Multi-field composite key for mapping lookups.
+
+    A composite key allows mappings to be identified by multiple fields
+    instead of a single source_id. For example, a mapping might be uniquely
+    identified by (tenant, product, terminal) rather than just source_id.
+
+    Example:
+        >>> key = CompositeMapKey(key={"tenant": "acme", "product": "fuel"})
+        >>> key.to_cache_key()
+        '{"product":"fuel","tenant":"acme"}'
+        >>> key.matches({"tenant": "acme"})
+        True
+    """
+    model_config = {"frozen": True}
+
+    key: dict[str, str] = Field(..., description="Field-value pairs forming the composite key")
+
+    @field_validator('key', mode='before')
+    @classmethod
+    def validate_key(cls, v: dict) -> dict[str, str]:
+        if not v:
+            raise ValueError("Composite key cannot be empty")
+        validated = {}
+        for k, val in v.items():
+            if not isinstance(k, str) or not k.strip():
+                raise ValueError(f"Key must be non-empty string, got: {k}")
+            if not isinstance(val, str):
+                raise ValueError(f"Value for '{k}' must be string, got {type(val).__name__}")
+            validated[k.strip()] = val
+        return validated
+
+    @cached_property
+    def _cache_key_str(self) -> str:
+        """Cached canonical string representation for hashing."""
+        return json.dumps(self.key, sort_keys=True, separators=(',', ':'))
+
+    def to_cache_key(self) -> str:
+        """Generate a canonical string representation for caching/hashing (JSON format)."""
+        return self._cache_key_str
+
+    @classmethod
+    def from_string(cls, s: str) -> Self:
+        """Parse a composite key from its JSON string representation."""
+        if not s:
+            raise ValueError("Cannot parse empty string")
+        try:
+            key_dict = json.loads(s)
+            if not isinstance(key_dict, dict):
+                raise ValueError("Invalid format: expected JSON object")
+            return cls(key=key_dict)
+        except json.JSONDecodeError as e:
+            raise ValueError(f"Failed to parse composite key: {e}")
+
+    @cached_property
+    def _cached_hash(self) -> int:
+        return hash(self._cache_key_str)
+
+    def __hash__(self) -> int:
+        return self._cached_hash
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, CompositeMapKey):
+            return False
+        return self.key == other.key
+
+    def __str__(self) -> str:
+        return self.to_cache_key()
+
+    def __repr__(self) -> str:
+        return f"CompositeMapKey({self.to_cache_key()})"
+
+    def __lt__(self, other: 'CompositeMapKey') -> bool:
+        if not isinstance(other, CompositeMapKey):
+            return NotImplemented
+        return self.to_cache_key() < other.to_cache_key()
+
+    def matches(self, partial: dict[str, str]) -> bool:
+        """
+        Check if this key contains all fields from the partial key.
+
+        Returns True if all key-value pairs in partial exist in this key.
+        Short-circuits on first mismatch for performance.
+        """
+        if len(partial) > len(self.key):
+            return False
+        for k, v in partial.items():
+            if self.key.get(k) != v:
+                return False
+        return True
+
+    def get(self, field: str, default: str | None = None) -> str | None:
+        """Get a specific component from the composite key."""
+        return self.key.get(field, default)
+
+    @property
+    def fields(self) -> list[str]:
+        """Return the list of field names in this composite key."""
+        return list(self.key.keys())
+
+
+class MapBase(BaseModel):
+    updated_by: str | None = Field(default="admin", description="User who last updated the mapping")
+    updated_on: datetime | None = Field(default_factory=lambda: datetime.now(UTC), description="Timestamp of last update")
+    type: Optional[MappingType | None] = Field(default=None, description="Type of mapping (site, tank, counterparty, etc.)")
+    source_system: str | None = Field(default=None, description="External system the mapping originates from")
+    is_active: Optional[bool] = Field(default=True, description="Whether the mapping is currently active")
+    source_name: str | None = Field(default=None, description="Display name in the source system")
+    gravitate_name: str | None = Field(default=None, description="Display name in Gravitate")
+    extra_data: Optional[Dict] = Field(default=None, description="Additional metadata for the mapping")
+
+    @field_validator('type', 'source_system', mode='before')
+    @classmethod
+    def validate_type_source_system(cls, v):
+        if isinstance(v, str) and (v == 'nan' or v == ''):
+            return None
+        elif isinstance(v, float) and pd.isna(v):  # Check for actual NaN (float nan)
+            return None
+        return v
+
+
+class Children(MapBase):
+    source_id: str | None = Field(
+        default=None,
+        description="Source system identifier"
+    )
+    gravitate_id: str | None = Field(
+        default=None,
+        description="Gravitate identifier"
+    )
+    id: Optional[str] = Field(
+        default_factory=lambda: str(ObjectId()),
+        description="Unique identifier for this child mapping"
+    )
+
+    @field_validator('source_id', 'gravitate_id', mode='before')
+    @classmethod
+    def validate_identifiers(cls, v):
+        if v is None or v == '' or v == 'nan':
+            return None
+        if isinstance(v, float) and pd.isna(v):
+            return None
+        return str(v)
+
+
+
+class Map(MapBase):
+    source_id: str | CompositeMapKey = Field(
+        ...,
+        description="Source system identifier (string or composite key)"
+    )
+    gravitate_id: str | CompositeMapKey = Field(
+        ...,
+        description="Gravitate identifier (string or composite key)"
+    )
+    children: Optional[list[Children]] = Field(
+        default_factory=list,
+        description="List of child mappings"
+    )
+    children_type: MappingType | None = Field(
+        default=None,
+        description="Type of child mappings"
+    )
+    owning_bucket_id: Optional[str] = Field(
+        default=None,
+        description="Bucket that owns this mapping"
+    )
+    group_id: str | None = Field(
+        default=None,
+        description="Group that owns this mapping"
+    )
+
+
+    def is_composite(self) -> bool:
+        """Check if this mapping uses composite keys."""
+        return isinstance(self.source_id, CompositeMapKey)
+
+    def source_id_str(self) -> str | None:
+        """Get source_id as string (for simple IDs) or None (for composite)."""
+        return self.source_id if isinstance(self.source_id, str) else None
+
+    def source_id_composite(self) -> CompositeMapKey | None:
+        """Get source_id as CompositeMapKey or None (for simple IDs)."""
+        return self.source_id if isinstance(self.source_id, CompositeMapKey) else None
+
+    def gravitate_id_str(self) -> str | None:
+        """Get gravitate_id as string (for simple IDs) or None (for composite)."""
+        return self.gravitate_id if isinstance(self.gravitate_id, str) else None
+
+    def gravitate_id_composite(self) -> CompositeMapKey | None:
+        """Get gravitate_id as CompositeMapKey or None (for simple IDs)."""
+        return self.gravitate_id if isinstance(self.gravitate_id, CompositeMapKey) else None
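This 227-line hunk matches bb_integrations_lib/models/rita/mapping.py in the file list. A hedged sketch of the composite-key behavior described in the CompositeMapKey docstring, using invented tenant/product/terminal values and relying only on the classes in the hunk:

# Illustrative only; field values are made up.
key = CompositeMapKey(key={"tenant": "acme", "product": "fuel", "terminal": "T-42"})

print(key.to_cache_key())                # canonical JSON, keys sorted: {"product":"fuel","tenant":"acme","terminal":"T-42"}
print(key.matches({"tenant": "acme"}))   # True: the partial key is contained
print(key.matches({"tenant": "other"}))  # False
print(key.get("terminal"))               # "T-42"
print(key.fields)                        # ["tenant", "product", "terminal"]

# Because __hash__/__eq__ are defined over the canonical string, composite keys can be dict keys:
mapping = Map(source_id=key, gravitate_id="GRV-001", type=MappingType.composite)
cache = {mapping.source_id_composite(): mapping.gravitate_id_str()}
print(mapping.is_composite())            # True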
@@ -0,0 +1,58 @@
+from datetime import datetime
+from enum import Enum
+from typing import Literal
+
+from pydantic import BaseModel
+
+
+class AuthType(str, Enum):
+    no_auth = "no auth"
+    bearer_with_username = "bearer with username and password"
+    bearer_with_clientid = "bearer with client id"
+
+class ProbeSubscriber(BaseModel):
+    id: str | None = None
+    url: str | None = None
+    auth_url: str | None = None
+    username: str | None = None
+    password: str | None = None
+    psk: str | None = None
+    auth_type: AuthType = AuthType.no_auth
+    is_active: bool = True
+
+class WorkerTarget(BaseModel):
+    worker_name: str | None = None
+    kwargs: dict | None = None
+
+class ProbeConfig(BaseModel):
+    probe_id: str | None = None
+    is_active: bool = True
+    report_create: bool = False
+    report_delete: bool = False
+    report_update: bool = False
+    conn_str: str | None = None
+    database: str | None = None
+    collection: str | None = None
+    query: dict = {}  # Events will only be reported if the object passes this filter.
+    entity_field: str | None = None
+    resume_token: str | None = None
+    update_check_interval_minutes: int = 0  # 0 implies a keep-alive probe that will always run.
+    external_subscribers: list[ProbeSubscriber] = []
+    worker_targets: list[WorkerTarget] = []  # List of RITA-owned workers to notify.
+    sending_entity_type: str | None = None
+    probe_type: str = "StandardProbe"  # The type of probe to be run on this config. Usually "StandardProbe"
+    resume_token_write_interval_seconds: int = 0  # At least this many seconds must pass before the resume token is written.
+    args: dict = {}  # arguments for custom probes.
+    probe_args: dict = {}
+    mode: Literal["test", "prod"] = "test"
+    output_request_data: str | None = None
+
+
+class ProbeStats(BaseModel):
+    time_started: str | None = None
+    changes_seen: int = 0
+    changes_processed: int = 0
+    num_times_updated_resume_token: int = 0
+    resume_token_time: str | None = None  # The timestamp that was associated with the last resume token updated. If the probe fell behind and is catching up, this may be significantly in the past.
+    resume_token_update_time: str | None = None  # The actual wall clock time when the resume token was last updated.
+    extra_data: dict = {}
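By line count this hunk is bb_integrations_lib/models/rita/probe.py (+58). A minimal, assumed configuration showing how a probe config, its subscribers, and worker targets fit together; the connection string, URLs, and names are placeholders, not real endpoints:

# Illustrative config only.
subscriber = ProbeSubscriber(
    url="https://example.invalid/webhook",
    auth_type=AuthType.no_auth,
)
config = ProbeConfig(
    probe_id="orders-probe",
    conn_str="mongodb://localhost:27017",   # placeholder connection string
    database="tenant_db",
    collection="orders",
    report_create=True,
    report_update=True,
    query={"status": "active"},             # only matching documents produce events
    external_subscribers=[subscriber],
    worker_targets=[WorkerTarget(worker_name="order_sync", kwargs={"batch": 100})],
    update_check_interval_minutes=0,        # keep-alive probe, per the inline comment above
    mode="test",
)
print(config.model_dump_json(indent=2))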
@@ -0,0 +1,110 @@
+from typing import Any, Dict, Optional, Self
+from enum import Enum
+from pydantic import BaseModel
+from datetime import datetime, UTC
+
+class ReferenceDataType(str, Enum):
+    terminal = "terminal"
+    store = "store"
+    tank = "tank"
+    location = "location"
+    product = "product"
+    counterparty = "counterparty"
+
+
+class CoreMasterReferenceData(BaseModel):
+    """
+    A single 'item' in the industry. This could point to a store, a product, a company, or anything else.
+    The data here lives in the RITA master tenant only, and works in conjunction with the data stored in RITA client
+    tenant's tables to enable conversion from one client's representation to another.
+    """
+    data_type: ReferenceDataType
+    """The data type of this item. Used for lookups."""
+
+    mrid: str
+    """The unique ID of this item. MasterReferenceLink objects will link on this field."""
+
+    name: str
+    """Display name of this item."""
+
+    source_tenant: str | None = None
+    """The original tenant of this item, if it has one."""
+
+    matching_info: Dict[str, Any] = {}
+    """A dictionary describing the info that makes this object unique. E.g. for terminals this could be the federal TCN."""
+    # We plan to engineer some way to throw a client's data at an LLM for matching. Matching_info would be the place to store
+    # data that's similar in structure.
+
+    mrd_extra_data: Dict[str, Any] = {}
+    """Any additional data about the object that should be tied to it during the conversion process."""
+
+    updated_by: Optional[str] = None
+    updated_on: Optional[datetime] = datetime.now(UTC)
+
+
+class MasterReferenceData(CoreMasterReferenceData):
+    children: Dict[str, CoreMasterReferenceData] = {}
+    """The children of this master reference data"""
+
+
+class CoreMasterReferenceLink(BaseModel):
+    """
+    Links 'items' in this tenant's gravitate instance to the MasterReferenceData. The data here lives only in RITA client
+    tenants, not the RITA master tenant, and matches some record in the MasterReferenceData. It is possible for one
+    MasterReferenceLink to match one MasterReferenceData.
+    """
+    data_type: ReferenceDataType
+    """The data type of this item. Used for lookups."""
+
+    mrid: Optional[str] = None
+    """Linked MasterReferenceData item. This may be empty and indicates that there is no associated item."""
+
+    display_name: str
+    """
+    Display name of this item, in the context of the tenant's gravitate. This is not used for code lookups and can be
+    set to anything. It may be set automatically by the sync module, if the source_system is "Gravitate"
+    """
+
+    source_id: str
+    """
+    Either (but not both):
+    - Mongodb ID of the item in the tenant's gravitate (if source_system is "Gravitate").
+    - ID of the item in the specified source system.
+    """
+
+    matching_info: Dict[str, Any] = {}
+    """A dictionary describing the info that makes this object unique. E.g. for terminals this could be the federal TCN, or an address."""
+
+    mrl_extra_data: Dict[str, Any] = {}
+    """Any additional data about the object that should be tied to it during the conversion process."""
+
+
+    updated_by: Optional[str] = None
+    updated_on: Optional[datetime] = datetime.now(UTC)
+
+
+class MasterReferenceLink(CoreMasterReferenceLink):
+    source_system: str = "Gravitate"
+    """The source system of this item. Defaults to 'Gravitate'. Used in mapping requests to add detail in mappings."""
+
+    children: Dict[str, CoreMasterReferenceLink] = {}
+    """Child references keyed by their Source ID. These can only match child references of the parent's matched master
+    reference data."""
+
+
+class ReferenceDataMappingExtraData(BaseModel):
+    origin_mrl_extra_data: dict[str, Any] = {}
+    mrd_extra_data: dict[str, Any] = {}
+
+
+class ReferenceDataMapping(BaseModel):
+    origin_source_id: str = ""
+    origin_source_system: str = ""
+    origin_mrl_id: str | None = None
+    origin_tenant: str = ""
+    target_tenant: str | None = None
+    target_mrls: list[MasterReferenceLink] = []
+    matched_mrid: str | None = None
+    matched_child_mrid: str | None = None
+    extra_data: ReferenceDataMappingExtraData = ReferenceDataMappingExtraData()
+    milliseconds_taken: int | None = None
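This hunk lines up with bb_integrations_lib/models/rita/reference_data.py (+110). A sketch, with invented identifiers, of a master record in the RITA master tenant and a client-tenant link pointing at it; names, IDs, and matching info are hypothetical:

# Hypothetical identifiers for illustration only.
master = MasterReferenceData(
    data_type=ReferenceDataType.terminal,
    mrid="MRID-TERM-0001",
    name="Springfield Terminal",
    matching_info={"tcn": "T12-IL-3456"},   # e.g. a federal TCN, per the docstring
)

link = MasterReferenceLink(
    data_type=ReferenceDataType.terminal,
    mrid=master.mrid,                        # MasterReferenceLink objects link on the mrid field
    display_name="Springfield Term.",
    source_id="650f1c0a9c4b2a0012345678",    # Mongodb ID when source_system is "Gravitate"
    matching_info={"address": "1 Refinery Rd"},
)

mapping = ReferenceDataMapping(
    origin_source_id=link.source_id,
    origin_source_system=link.source_system,
    origin_tenant="client_a",
    target_tenant="client_b",
    matched_mrid=master.mrid,
)
print(mapping.model_dump())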
@@ -0,0 +1,76 @@
+from datetime import datetime, UTC
+from enum import Enum
+from typing import Optional, Union, Literal, Annotated, Any
+
+from pydantic import BaseModel, Field
+
+
+class UnknownRunnableException(Exception):
+    pass
+
+WorkerTaskState = Literal["sent", "received", "working", "failed", "completed"]
+
+class WorkerRequest(BaseModel):
+    """A request for a specific runnable to be executed on an available worker."""
+    runnable_name: str
+    runnable_kwargs: dict[str, Any] = {}
+    tenant_name: str
+    originator: Literal["backend", "probe", "other"] = "other"
+
+class WorkerContentFile(BaseModel):
+    """
+    A file included in a worker's response. Supports binary (non-text) files by automatically encoding/decoding from
+    base64 when serialized.
+    """
+    file_name: str
+    mime_type: str | None = None
+    content: bytes | None = None
+
+    class Config:
+        ser_json_bytes = "base64"
+        val_json_bytes = "base64"
+
+class WorkerResponseStatus(str, Enum):
+    success = "success"
+    error = "error"
+
+class WorkerSuccessResponse(BaseModel):
+    """A response from a worker indicating success and providing results."""
+    # This is a pretty weird field definition. Basically, to support using this field as a discriminator for the
+    # WorkerResponse union type, we need it to only ever be status="success", (or "error", in the error class).
+    # These type annotations mean it is a WorkerResponseStatus that can only ever be .success, and that it cannot be set
+    # in the init method (init=False) or anywhere else (init_var=True).
+    # You can't use a frozen field here because it won't play nice with Beanie serialization.
+    status: Annotated[Literal[WorkerResponseStatus.success], Field(init_var=True, init=False)] = WorkerResponseStatus.success
+    runnable_name: str
+    content: str | None = None
+    content_file: WorkerContentFile | None = None
+    extra_data: dict = {}
+
+class WorkerErrorResponse(BaseModel):
+    """A response from a worker indicating an error occurred and providing error details."""
+    status: Annotated[Literal[WorkerResponseStatus.error], Field(init_var=True, init=False)] = WorkerResponseStatus.error
+    runnable_name: str
+    exception_type_name: str
+    error_message: str
+    reference_code: Optional[str] = None
+
+WorkerResponse = Annotated[Union[WorkerSuccessResponse, WorkerErrorResponse], Field(discriminator="status")]
+
+
+class WorkerUpdate(BaseModel):
+    """Provides status updates to the requester for backgrounded tasks."""
+    tenant_name: str
+    originator: str
+    sent_at: datetime = Field(default_factory=lambda: datetime.now(tz=UTC))
+    state: WorkerTaskState
+    response: Optional[WorkerResponse] = None
+
+class WorkerTask(BaseModel):
+    correlation_id: str
+    created_at: datetime
+    updated_at: datetime
+    mode: Literal["immediate", "background"]
+    state: WorkerTaskState
+    request: WorkerRequest
+    response: Optional[WorkerResponse] = None
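The final hunk corresponds to bb_integrations_lib/models/rita/workers.py (+76). A small sketch of the status-discriminated WorkerResponse union in action, assuming only the models above; the runnable name, tenant, and messages are made up:

from pydantic import TypeAdapter

# Hypothetical round-trip: the "status" discriminator selects the concrete response model.
ok = WorkerSuccessResponse(runnable_name="rebuild_price_index", content="42 rows updated")
err = WorkerErrorResponse(
    runnable_name="rebuild_price_index",
    exception_type_name="TimeoutError",
    error_message="price service did not respond",
)

adapter = TypeAdapter(WorkerResponse)
parsed = adapter.validate_python(err.model_dump(mode="json"))
print(type(parsed).__name__)   # WorkerErrorResponse, chosen via status="error"

update = WorkerUpdate(
    tenant_name="client_a",
    originator="backend",
    state="completed",
    response=ok,
)
print(update.model_dump_json())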