bb-integrations-library 3.0.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bb_integrations_lib/__init__.py +0 -0
- bb_integrations_lib/converters/__init__.py +0 -0
- bb_integrations_lib/gravitate/__init__.py +0 -0
- bb_integrations_lib/gravitate/base_api.py +20 -0
- bb_integrations_lib/gravitate/model.py +29 -0
- bb_integrations_lib/gravitate/pe_api.py +122 -0
- bb_integrations_lib/gravitate/rita_api.py +552 -0
- bb_integrations_lib/gravitate/sd_api.py +572 -0
- bb_integrations_lib/gravitate/testing/TTE/sd/models.py +1398 -0
- bb_integrations_lib/gravitate/testing/TTE/sd/tests/test_models.py +2987 -0
- bb_integrations_lib/gravitate/testing/__init__.py +0 -0
- bb_integrations_lib/gravitate/testing/builder.py +55 -0
- bb_integrations_lib/gravitate/testing/openapi.py +70 -0
- bb_integrations_lib/gravitate/testing/util.py +274 -0
- bb_integrations_lib/mappers/__init__.py +0 -0
- bb_integrations_lib/mappers/prices/__init__.py +0 -0
- bb_integrations_lib/mappers/prices/model.py +106 -0
- bb_integrations_lib/mappers/prices/price_mapper.py +127 -0
- bb_integrations_lib/mappers/prices/protocol.py +20 -0
- bb_integrations_lib/mappers/prices/util.py +61 -0
- bb_integrations_lib/mappers/rita_mapper.py +523 -0
- bb_integrations_lib/models/__init__.py +0 -0
- bb_integrations_lib/models/dtn_supplier_invoice.py +487 -0
- bb_integrations_lib/models/enums.py +28 -0
- bb_integrations_lib/models/pipeline_structs.py +76 -0
- bb_integrations_lib/models/probe/probe_event.py +20 -0
- bb_integrations_lib/models/probe/request_data.py +431 -0
- bb_integrations_lib/models/probe/resume_token.py +7 -0
- bb_integrations_lib/models/rita/audit.py +113 -0
- bb_integrations_lib/models/rita/auth.py +30 -0
- bb_integrations_lib/models/rita/bucket.py +17 -0
- bb_integrations_lib/models/rita/config.py +188 -0
- bb_integrations_lib/models/rita/constants.py +19 -0
- bb_integrations_lib/models/rita/crossroads_entities.py +293 -0
- bb_integrations_lib/models/rita/crossroads_mapping.py +428 -0
- bb_integrations_lib/models/rita/crossroads_monitoring.py +78 -0
- bb_integrations_lib/models/rita/crossroads_network.py +41 -0
- bb_integrations_lib/models/rita/crossroads_rules.py +80 -0
- bb_integrations_lib/models/rita/email.py +39 -0
- bb_integrations_lib/models/rita/issue.py +63 -0
- bb_integrations_lib/models/rita/mapping.py +227 -0
- bb_integrations_lib/models/rita/probe.py +58 -0
- bb_integrations_lib/models/rita/reference_data.py +110 -0
- bb_integrations_lib/models/rita/source_system.py +9 -0
- bb_integrations_lib/models/rita/workers.py +76 -0
- bb_integrations_lib/models/sd/bols_and_drops.py +241 -0
- bb_integrations_lib/models/sd/get_order.py +301 -0
- bb_integrations_lib/models/sd/orders.py +18 -0
- bb_integrations_lib/models/sd_api.py +115 -0
- bb_integrations_lib/pipelines/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/order_by_site_product_parser.py +50 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/tank_configs_parser.py +47 -0
- bb_integrations_lib/pipelines/parsers/dtn/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/dtn/dtn_price_parser.py +102 -0
- bb_integrations_lib/pipelines/parsers/dtn/model.py +79 -0
- bb_integrations_lib/pipelines/parsers/price_engine/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/price_engine/parse_accessorials_prices_parser.py +67 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_merge_parser.py +111 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_sync_parser.py +107 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/shared.py +81 -0
- bb_integrations_lib/pipelines/parsers/tank_reading_parser.py +155 -0
- bb_integrations_lib/pipelines/parsers/tank_sales_parser.py +144 -0
- bb_integrations_lib/pipelines/shared/__init__.py +0 -0
- bb_integrations_lib/pipelines/shared/allocation_matching.py +227 -0
- bb_integrations_lib/pipelines/shared/bol_allocation.py +2793 -0
- bb_integrations_lib/pipelines/steps/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/create_accessorials_step.py +80 -0
- bb_integrations_lib/pipelines/steps/distribution_report/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/distribution_report/distribution_report_datafram_to_raw_data.py +33 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_model_history_step.py +50 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_order_by_site_product_step.py +62 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_tank_configs_step.py +40 -0
- bb_integrations_lib/pipelines/steps/distribution_report/join_distribution_order_dos_step.py +85 -0
- bb_integrations_lib/pipelines/steps/distribution_report/upload_distribution_report_datafram_to_big_query.py +47 -0
- bb_integrations_lib/pipelines/steps/echo_step.py +14 -0
- bb_integrations_lib/pipelines/steps/export_dataframe_to_rawdata_step.py +28 -0
- bb_integrations_lib/pipelines/steps/exporting/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/exporting/bbd_export_payroll_file_step.py +107 -0
- bb_integrations_lib/pipelines/steps/exporting/bbd_export_readings_step.py +236 -0
- bb_integrations_lib/pipelines/steps/exporting/cargas_wholesale_bundle_upload_step.py +33 -0
- bb_integrations_lib/pipelines/steps/exporting/dataframe_flat_file_export.py +29 -0
- bb_integrations_lib/pipelines/steps/exporting/gcs_bucket_export_file_step.py +34 -0
- bb_integrations_lib/pipelines/steps/exporting/keyvu_export_step.py +356 -0
- bb_integrations_lib/pipelines/steps/exporting/pe_price_export_step.py +238 -0
- bb_integrations_lib/pipelines/steps/exporting/platform_science_order_sync_step.py +500 -0
- bb_integrations_lib/pipelines/steps/exporting/save_rawdata_to_disk.py +15 -0
- bb_integrations_lib/pipelines/steps/exporting/sftp_export_file_step.py +60 -0
- bb_integrations_lib/pipelines/steps/exporting/sftp_export_many_files_step.py +23 -0
- bb_integrations_lib/pipelines/steps/exporting/update_exported_orders_table_step.py +64 -0
- bb_integrations_lib/pipelines/steps/filter_step.py +22 -0
- bb_integrations_lib/pipelines/steps/get_latest_sync_date.py +34 -0
- bb_integrations_lib/pipelines/steps/importing/bbd_import_payroll_step.py +30 -0
- bb_integrations_lib/pipelines/steps/importing/get_order_numbers_to_export_step.py +138 -0
- bb_integrations_lib/pipelines/steps/importing/load_file_to_dataframe_step.py +46 -0
- bb_integrations_lib/pipelines/steps/importing/load_imap_attachment_step.py +172 -0
- bb_integrations_lib/pipelines/steps/importing/pe_bulk_sync_price_structure_step.py +68 -0
- bb_integrations_lib/pipelines/steps/importing/pe_price_merge_step.py +86 -0
- bb_integrations_lib/pipelines/steps/importing/sftp_file_config_step.py +124 -0
- bb_integrations_lib/pipelines/steps/importing/test_exact_file_match.py +57 -0
- bb_integrations_lib/pipelines/steps/null_step.py +15 -0
- bb_integrations_lib/pipelines/steps/pe_integration_job_step.py +32 -0
- bb_integrations_lib/pipelines/steps/processing/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/processing/archive_gcs_step.py +76 -0
- bb_integrations_lib/pipelines/steps/processing/archive_sftp_step.py +48 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_format_tank_readings_step.py +492 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_prices_step.py +54 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_tank_sales_step.py +124 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_tankreading_step.py +80 -0
- bb_integrations_lib/pipelines/steps/processing/convert_bbd_order_to_cargas_step.py +226 -0
- bb_integrations_lib/pipelines/steps/processing/delete_sftp_step.py +33 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/__init__.py +2 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/convert_dtn_invoice_to_sd_model.py +145 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/parse_dtn_invoice_step.py +38 -0
- bb_integrations_lib/pipelines/steps/processing/file_config_parser_step.py +720 -0
- bb_integrations_lib/pipelines/steps/processing/file_config_parser_step_v2.py +418 -0
- bb_integrations_lib/pipelines/steps/processing/get_sd_price_price_request.py +105 -0
- bb_integrations_lib/pipelines/steps/processing/keyvu_upload_deliveryplan_step.py +39 -0
- bb_integrations_lib/pipelines/steps/processing/mark_orders_exported_in_bbd_step.py +185 -0
- bb_integrations_lib/pipelines/steps/processing/pe_price_rows_processing_step.py +174 -0
- bb_integrations_lib/pipelines/steps/processing/send_process_report_step.py +47 -0
- bb_integrations_lib/pipelines/steps/processing/sftp_renamer_step.py +61 -0
- bb_integrations_lib/pipelines/steps/processing/tank_reading_touchup_steps.py +75 -0
- bb_integrations_lib/pipelines/steps/processing/upload_supplier_invoice_step.py +16 -0
- bb_integrations_lib/pipelines/steps/send_attached_in_rita_email_step.py +44 -0
- bb_integrations_lib/pipelines/steps/send_rita_email_step.py +34 -0
- bb_integrations_lib/pipelines/steps/sleep_step.py +24 -0
- bb_integrations_lib/pipelines/wrappers/__init__.py +0 -0
- bb_integrations_lib/pipelines/wrappers/accessorials_transformation.py +104 -0
- bb_integrations_lib/pipelines/wrappers/distribution_report.py +191 -0
- bb_integrations_lib/pipelines/wrappers/export_tank_readings.py +237 -0
- bb_integrations_lib/pipelines/wrappers/import_tank_readings.py +192 -0
- bb_integrations_lib/pipelines/wrappers/wrapper.py +81 -0
- bb_integrations_lib/protocols/__init__.py +0 -0
- bb_integrations_lib/protocols/flat_file.py +210 -0
- bb_integrations_lib/protocols/gravitate_client.py +104 -0
- bb_integrations_lib/protocols/pipelines.py +697 -0
- bb_integrations_lib/provider/__init__.py +0 -0
- bb_integrations_lib/provider/api/__init__.py +0 -0
- bb_integrations_lib/provider/api/cargas/__init__.py +0 -0
- bb_integrations_lib/provider/api/cargas/client.py +43 -0
- bb_integrations_lib/provider/api/cargas/model.py +49 -0
- bb_integrations_lib/provider/api/cargas/protocol.py +23 -0
- bb_integrations_lib/provider/api/dtn/__init__.py +0 -0
- bb_integrations_lib/provider/api/dtn/client.py +128 -0
- bb_integrations_lib/provider/api/dtn/protocol.py +9 -0
- bb_integrations_lib/provider/api/keyvu/__init__.py +0 -0
- bb_integrations_lib/provider/api/keyvu/client.py +30 -0
- bb_integrations_lib/provider/api/keyvu/model.py +149 -0
- bb_integrations_lib/provider/api/macropoint/__init__.py +0 -0
- bb_integrations_lib/provider/api/macropoint/client.py +28 -0
- bb_integrations_lib/provider/api/macropoint/model.py +40 -0
- bb_integrations_lib/provider/api/pc_miler/__init__.py +0 -0
- bb_integrations_lib/provider/api/pc_miler/client.py +130 -0
- bb_integrations_lib/provider/api/pc_miler/model.py +6 -0
- bb_integrations_lib/provider/api/pc_miler/web_services_apis.py +131 -0
- bb_integrations_lib/provider/api/platform_science/__init__.py +0 -0
- bb_integrations_lib/provider/api/platform_science/client.py +147 -0
- bb_integrations_lib/provider/api/platform_science/model.py +82 -0
- bb_integrations_lib/provider/api/quicktrip/__init__.py +0 -0
- bb_integrations_lib/provider/api/quicktrip/client.py +52 -0
- bb_integrations_lib/provider/api/telapoint/__init__.py +0 -0
- bb_integrations_lib/provider/api/telapoint/client.py +68 -0
- bb_integrations_lib/provider/api/telapoint/model.py +178 -0
- bb_integrations_lib/provider/api/warren_rogers/__init__.py +0 -0
- bb_integrations_lib/provider/api/warren_rogers/client.py +207 -0
- bb_integrations_lib/provider/aws/__init__.py +0 -0
- bb_integrations_lib/provider/aws/s3/__init__.py +0 -0
- bb_integrations_lib/provider/aws/s3/client.py +126 -0
- bb_integrations_lib/provider/ftp/__init__.py +0 -0
- bb_integrations_lib/provider/ftp/client.py +140 -0
- bb_integrations_lib/provider/ftp/interface.py +273 -0
- bb_integrations_lib/provider/ftp/model.py +76 -0
- bb_integrations_lib/provider/imap/__init__.py +0 -0
- bb_integrations_lib/provider/imap/client.py +228 -0
- bb_integrations_lib/provider/imap/model.py +3 -0
- bb_integrations_lib/provider/sqlserver/__init__.py +0 -0
- bb_integrations_lib/provider/sqlserver/client.py +106 -0
- bb_integrations_lib/secrets/__init__.py +4 -0
- bb_integrations_lib/secrets/adapters.py +98 -0
- bb_integrations_lib/secrets/credential_models.py +222 -0
- bb_integrations_lib/secrets/factory.py +85 -0
- bb_integrations_lib/secrets/providers.py +160 -0
- bb_integrations_lib/shared/__init__.py +0 -0
- bb_integrations_lib/shared/exceptions.py +25 -0
- bb_integrations_lib/shared/model.py +1039 -0
- bb_integrations_lib/shared/shared_enums.py +510 -0
- bb_integrations_lib/storage/README.md +236 -0
- bb_integrations_lib/storage/__init__.py +0 -0
- bb_integrations_lib/storage/aws/__init__.py +0 -0
- bb_integrations_lib/storage/aws/s3.py +8 -0
- bb_integrations_lib/storage/defaults.py +72 -0
- bb_integrations_lib/storage/gcs/__init__.py +0 -0
- bb_integrations_lib/storage/gcs/client.py +8 -0
- bb_integrations_lib/storage/gcsmanager/__init__.py +0 -0
- bb_integrations_lib/storage/gcsmanager/client.py +8 -0
- bb_integrations_lib/storage/setup.py +29 -0
- bb_integrations_lib/util/__init__.py +0 -0
- bb_integrations_lib/util/cache/__init__.py +0 -0
- bb_integrations_lib/util/cache/custom_ttl_cache.py +75 -0
- bb_integrations_lib/util/cache/protocol.py +9 -0
- bb_integrations_lib/util/config/__init__.py +0 -0
- bb_integrations_lib/util/config/manager.py +391 -0
- bb_integrations_lib/util/config/model.py +41 -0
- bb_integrations_lib/util/exception_logger/__init__.py +0 -0
- bb_integrations_lib/util/exception_logger/exception_logger.py +146 -0
- bb_integrations_lib/util/exception_logger/test.py +114 -0
- bb_integrations_lib/util/utils.py +364 -0
- bb_integrations_lib/workers/__init__.py +0 -0
- bb_integrations_lib/workers/groups.py +13 -0
- bb_integrations_lib/workers/rpc_worker.py +50 -0
- bb_integrations_lib/workers/topics.py +20 -0
- bb_integrations_library-3.0.11.dist-info/METADATA +59 -0
- bb_integrations_library-3.0.11.dist-info/RECORD +217 -0
- bb_integrations_library-3.0.11.dist-info/WHEEL +4 -0
|
@@ -0,0 +1,155 @@
|
|
|
1
|
+
import json
|
|
2
|
+
from typing import AsyncGenerator, Optional, cast
|
|
3
|
+
from typing import override
|
|
4
|
+
|
|
5
|
+
import dateutil
|
|
6
|
+
from babel.numbers import parse_decimal, NumberFormatError
|
|
7
|
+
from bb_integrations_lib.gravitate.sd_api import GravitateSDAPI
|
|
8
|
+
from bb_integrations_lib.util.config.manager import GlobalConfigManager
|
|
9
|
+
from dateutil.parser import parse
|
|
10
|
+
from dateutil.tz import gettz
|
|
11
|
+
from loguru import logger
|
|
12
|
+
|
|
13
|
+
from bb_integrations_lib.mappers.rita_mapper import RitaMapper, AsyncMappingProvider
|
|
14
|
+
from bb_integrations_lib.models.rita.issue import IssueCategory
|
|
15
|
+
from bb_integrations_lib.models.rita.mapping import MappingType
|
|
16
|
+
from bb_integrations_lib.protocols.flat_file import TankReading, TankMonitorType
|
|
17
|
+
from bb_integrations_lib.protocols.pipelines import Parser
|
|
18
|
+
from bb_integrations_lib.shared.model import MappingMode
|
|
19
|
+
from bb_integrations_lib.util.utils import lookup
|
|
20
|
+
|
|
21
|
+
# Map ambiguous US timezone abbreviations to concrete dateutil zones for use
# as ``tzinfos=`` in dateutil.parser.parse. Abbreviations like "EST" are not
# globally unique, so both the standard- and daylight-time names of each US
# zone are pinned explicitly here.
tzmapping = {
    'EST': gettz("US/Eastern"),
    'EDT': gettz("US/Eastern"),
    'CST': gettz("US/Central"),
    'CDT': gettz("US/Central"),
    'MST': gettz("US/Mountain"),
    'MDT': gettz("US/Mountain"),
    'PST': gettz("US/Pacific"),
    'PDT': gettz("US/Pacific"),
}
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class TankReadingParser(Parser):
    """Parse raw tank reading records into TankReading objects.

    Each record is sanity-checked (volume, tank id, reading time) and, when a
    mapping mode other than ``skip`` is requested, its site/tank ids are
    translated to Gravitate ids via the RITA mapper. Records that fail a check
    are skipped (optionally logged); mapping failures are reported through
    ``record_issue``.
    """

    def __init__(
        self,
        sd_client: GravitateSDAPI,
        source_system: str | None = None,
        mapping_provider: Optional[AsyncMappingProvider] = None,
        included_payload: Optional[dict] = None,
        verbose: bool = False,
        *args,
        **kwargs
    ):
        super().__init__(source_system, mapping_provider)
        self.sd_client = sd_client
        self.verbose = verbose
        # Extra key/values attached to every emitted TankReading's payload.
        self.included_payload = included_payload or {}
        # Populated lazily in parse() via load_mapper().
        self.mapper: Optional[RitaMapper] = None

    @override
    async def parse(self, data: list[dict], mapping_type: MappingMode | None = None) -> AsyncGenerator[
        TankReading, None]:
        """Yield a TankReading for every record in ``data`` that survives preparse."""
        if mapping_type is None:
            logger.warning("TankReadingParser.parse mapping_type is None, defaulting to skip")
            mapping_type = MappingMode.skip
        sd_store_lkp = await self.get_store_lkp()
        self.mapper = await self.load_mapper()
        preparsed_records = self.preparse(data, mapping_type)

        for rec in preparsed_records:
            # logger.catch logs and swallows any per-record failure so one bad
            # record does not abort the whole generator.
            with logger.catch(message=f"Skipped record {rec} due to error"):
                store_id = rec.get("site_id")
                sd_store = sd_store_lkp.get(store_id, {})
                # Fall back to the store's configured timezone when the record
                # does not carry one of its own.
                store_tz = sd_store.get("timezone")
                yield TankReading(
                    store=store_id,
                    date=rec.get("reading_time"),
                    monitor_type=TankMonitorType.bbd,
                    timezone=rec.get("timezone") or store_tz,
                    volume=rec.get("volume"),
                    tank=rec.get("tank_id"),
                    payload=self.included_payload
                )

    async def get_store_lkp(self):
        """Return a lookup of SD stores keyed by ``store_number``."""
        stores = await self.sd_client.all_stores()
        return lookup(stores.json(), lambda x: x.get("store_number"))

    def preparse(self, records: list[dict], mapping_type: MappingMode) -> list:
        """Perform basic sanity checking on records and map tank and site ids, if applicable."""
        parsed_records = []
        mapping_failures = []
        for translated in records:
            try:
                translated_volume = translated.get("volume")
                if translated_volume == 'nan':
                    if self.verbose:
                        logger.warning(f"Skipped record {translated} due to NaN volume.")
                    continue
                try:
                    # Only run locale-aware parsing on strings; numeric values
                    # convert directly. TypeError covers None / unparseable types.
                    if isinstance(translated_volume, str):
                        trans_vol_decimal = float(parse_decimal(translated_volume, locale="en_US"))
                    else:
                        trans_vol_decimal = float(translated_volume)
                    translated["volume"] = trans_vol_decimal
                except (NumberFormatError, TypeError):
                    if self.verbose:
                        logger.warning(
                            f"Skipped record {translated} due to invalid volume value '{translated_volume}'.")
                    continue
                if trans_vol_decimal < 0:
                    if self.verbose:
                        logger.warning(f"Skipped record {translated} due to negative volume.")
                    continue
                if translated.get("tank_id") == "nan":
                    if self.verbose:
                        logger.warning(f"Skipped record {translated} due to NaN tank.")
                    continue
                if not translated.get("reading_time"):
                    logger.warning(f"Skipped record {translated} due to missing date")
                    continue
                try:
                    date_parsed = dateutil.parser.parse(translated.get("reading_time"), tzinfos=tzmapping)
                    translated["reading_time"] = date_parsed.isoformat()
                except Exception as parse_error:
                    logger.warning(f"Skipped record {translated} due to date parsing error: {parse_error}")
                    continue

                if mapping_type == MappingMode.skip:
                    parsed_records.append(translated)
                elif mapping_type in (MappingMode.partial, MappingMode.full):
                    try:
                        site_id = translated["site_id"]
                        tank_id = translated["tank_id"]
                        mapped_site_ids = self.mapper.get_gravitate_parent_ids(site_id, MappingType.site)
                        mapped_tank_ids = self.mapper.get_gravitate_child_ids(
                            site_id, tank_id.strip(), MappingType.tank
                        )
                        # BUG FIX: append a distinct copy per (site, tank)
                        # combination. The previous code appended the same
                        # mutated dict repeatedly, so with multiple mapped ids
                        # every appended record held only the LAST pair.
                        for mapped_site_id in mapped_site_ids:
                            for mapped_tank_id in mapped_tank_ids:
                                parsed_records.append(
                                    {**translated, "site_id": mapped_site_id, "tank_id": mapped_tank_id}
                                )
                    except (KeyError, ValueError) as e:
                        # In partial mode an unmapped record passes through
                        # untranslated; in full mode it is a hard failure.
                        if mapping_type == MappingMode.partial:
                            parsed_records.append(translated)
                        else:
                            raise e

            except (KeyError, ValueError) as e:
                logger.warning(f"Skipped record {translated} due to error: {e}")
                mapping_failures.append(translated)
        if len(mapping_failures) > 0:
            self.record_issue(
                key_suffix="mapping_errors",
                name="Mapping errors",
                category=IssueCategory.TANK_READING,
                problem_short=f"{len(mapping_failures)} rows failed to map",
                problem_long=json.dumps(mapping_failures)
            )
        return parsed_records
|
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
from zoneinfo import ZoneInfo
|
|
2
|
+
|
|
3
|
+
import dateutil
|
|
4
|
+
import json
|
|
5
|
+
from babel.numbers import parse_decimal, NumberFormatError
|
|
6
|
+
from dateutil.parser import parse
|
|
7
|
+
from dateutil.tz import gettz
|
|
8
|
+
from loguru import logger
|
|
9
|
+
from pandas import DataFrame
|
|
10
|
+
from typing import AsyncGenerator, Optional
|
|
11
|
+
from typing import override
|
|
12
|
+
|
|
13
|
+
from bb_integrations_lib.mappers.rita_mapper import RitaMapper, AsyncMappingProvider
|
|
14
|
+
from bb_integrations_lib.models.rita.issue import IssueCategory
|
|
15
|
+
from bb_integrations_lib.models.rita.mapping import MappingType
|
|
16
|
+
from bb_integrations_lib.protocols.flat_file import TankSales
|
|
17
|
+
from bb_integrations_lib.protocols.pipelines import Parser
|
|
18
|
+
from bb_integrations_lib.shared.model import MappingMode
|
|
19
|
+
|
|
20
|
+
# Map ambiguous US timezone abbreviations to concrete dateutil zones for use
# as ``tzinfos=`` in dateutil.parser.parse. Abbreviations like "CST" are not
# globally unique, so both the standard- and daylight-time names of each US
# zone are pinned explicitly here.
tzmapping = {
    'EST': gettz("US/Eastern"),
    'EDT': gettz("US/Eastern"),
    'CST': gettz("US/Central"),
    'CDT': gettz("US/Central"),
    'MST': gettz("US/Mountain"),
    'MDT': gettz("US/Mountain"),
    'PST': gettz("US/Pacific"),
    'PDT': gettz("US/Pacific"),
}
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class SalesParser(Parser):
    """Parse raw tank sales records into TankSales objects.

    Each record is sanity-checked (sales value, tank id, date) and, when a
    mapping mode other than ``skip`` is requested, its site/tank ids are
    translated to Gravitate ids via the RITA mapper. When ``deduplicate`` is
    enabled, rows sharing (site_id, tank_id, date) are summed into one row.
    """

    def __init__(
        self,
        tenant_name: str,
        source_system: str | None = None,
        mapping_provider: Optional[AsyncMappingProvider] = None,
        included_payload: Optional[dict] = None,
        verbose: bool = False,
        deduplicate: bool = True,
    ):
        super().__init__(tenant_name, source_system, mapping_provider)
        self.verbose = verbose
        self.included_payload = included_payload or {}
        # Populated lazily in parse() via load_mapper().
        self.mapper: Optional[RitaMapper] = None
        # When True, rows with the same (site, tank, date) are summed.
        self.deduplicate = deduplicate

    def dedupe_records(self, records: list[dict]) -> list[dict]:
        """Sum the sales of records sharing (site_id, tank_id, date)."""
        df = DataFrame(records)
        grouped_df = df.groupby(["site_id", "tank_id", "date"], as_index=False)["sales"].sum()
        grouped_df = grouped_df[["site_id", "tank_id", "date", "sales"]]
        return grouped_df.to_dict("records")

    @override
    async def parse(self, data: list[dict], mapping_type: MappingMode | None = None) -> AsyncGenerator[TankSales, None]:
        """Yield a TankSales for every record in ``data`` that survives preparse."""
        if mapping_type is None:
            logger.warning("TankSalesParser.parse mapping_type is None, defaulting to skip")
            mapping_type = MappingMode.skip
        self.mapper = await self.load_mapper()
        preparsed_records = self.preparse(data, mapping_type)
        if self.deduplicate:
            preparsed_records = self.dedupe_records(preparsed_records)

        for rec in preparsed_records:
            # logger.catch logs and swallows any per-record failure so one bad
            # record does not abort the whole generator.
            with logger.catch(message=f"Skipped record {rec} due to error"):
                ts = TankSales(
                    store_number=rec["site_id"],
                    tank_id=rec["tank_id"],
                    sales=float(rec["sales"]),
                    # Dates were normalized to ISO strings in preparse; they
                    # are stamped as UTC here — NOTE(review): this overwrites
                    # any offset parsed from the source, confirm intended.
                    date=parse(rec['date']).replace(tzinfo=ZoneInfo("UTC")).isoformat(),
                )
                yield ts

    def preparse(self, records: list[dict], mapping_type: MappingMode) -> list:
        """Perform basic sanity checking on records and map tank and site ids, if applicable."""
        parsed_records = []
        mapping_failures = []
        for translated in records:
            try:
                translated_volume = translated.get("sales")
                if translated_volume == 'nan':
                    if self.verbose:
                        logger.warning(f"Skipped record {translated} due to NaN volume.")
                    continue
                try:
                    # CONSISTENCY/BUG FIX: match TankReadingParser — only run
                    # locale-aware parsing on strings; numeric values convert
                    # directly. Previously a non-string sales value reached
                    # parse_decimal and raised an uncaught TypeError.
                    if isinstance(translated_volume, str):
                        trans_vol_decimal = float(parse_decimal(translated_volume, locale="en_US"))
                    else:
                        trans_vol_decimal = float(translated_volume)
                    translated["sales"] = trans_vol_decimal
                except (NumberFormatError, TypeError):
                    if self.verbose:
                        logger.warning(
                            f"Skipped record {translated} due to invalid volume value '{translated_volume}'.")
                    continue
                if trans_vol_decimal < 0:
                    if self.verbose:
                        logger.warning(f"Skipped record {translated} due to negative volume.")
                    continue
                if translated.get("tank_id") == "nan":
                    if self.verbose:
                        logger.warning(f"Skipped record {translated} due to NaN tank.")
                    continue
                if not translated.get("date"):
                    logger.warning(f"Skipped record {translated} due to missing date")
                    continue
                try:
                    date_parsed = dateutil.parser.parse(translated.get("date"), tzinfos=tzmapping)
                    translated["date"] = date_parsed.isoformat()
                except Exception as parse_error:
                    logger.warning(f"Skipped record {translated} due to date parsing error: {parse_error}")
                    continue

                if mapping_type == MappingMode.skip:
                    parsed_records.append(translated)
                elif mapping_type in (MappingMode.partial, MappingMode.full):
                    try:
                        site_id = translated["site_id"]
                        tank_id = translated["tank_id"]
                        mapped_site_ids = self.mapper.get_gravitate_parent_ids(site_id, MappingType.site)
                        mapped_tank_ids = self.mapper.get_gravitate_child_ids(
                            site_id, tank_id.strip(), MappingType.tank
                        )
                        # BUG FIX: append a distinct copy per (site, tank)
                        # combination. The previous code appended the same
                        # mutated dict repeatedly, so with multiple mapped ids
                        # every appended record held only the LAST pair.
                        for mapped_site_id in mapped_site_ids:
                            for mapped_tank_id in mapped_tank_ids:
                                parsed_records.append(
                                    {**translated, "site_id": mapped_site_id, "tank_id": mapped_tank_id}
                                )
                    except (KeyError, ValueError) as e:
                        # In partial mode an unmapped record passes through
                        # untranslated; in full mode it is a hard failure.
                        if mapping_type == MappingMode.partial:
                            parsed_records.append(translated)
                        else:
                            raise e

            except (KeyError, ValueError) as e:
                if self.verbose:
                    logger.warning(f"Skipped record {translated} due to error: {e}")
                mapping_failures.append(translated)
        if len(mapping_failures) > 0:
            self.record_issue(
                key_suffix="mapping_errors",
                name="Mapping errors",
                category=IssueCategory.TANK_READING,
                problem_short=f"{len(mapping_failures)} rows failed to map",
                problem_long=json.dumps(mapping_failures)
            )
        return parsed_records
|
|
File without changes
|
|
@@ -0,0 +1,227 @@
|
|
|
1
|
+
from typing import Optional
|
|
2
|
+
|
|
3
|
+
from pydantic import BaseModel
|
|
4
|
+
|
|
5
|
+
from bb_integrations_lib.models.sd.bols_and_drops import (
|
|
6
|
+
AllocatedBOL,
|
|
7
|
+
BOL,
|
|
8
|
+
BOLDetail,
|
|
9
|
+
Drop as ExecutedDrop,
|
|
10
|
+
)
|
|
11
|
+
from bb_integrations_lib.models.sd.get_order import (
|
|
12
|
+
Drop as PlannedDrop,
|
|
13
|
+
DropDetail as PlannedDropDetail,
|
|
14
|
+
Load as PlannedLoad,
|
|
15
|
+
LoadDetail as PlannedLoadDetail,
|
|
16
|
+
)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class MatchedAllocation(BaseModel):
    """Result of matching an allocated BOL to planned and executed data.

    All Optional fields default to None and are only populated when the
    corresponding match succeeded; the ``matched_to_*`` booleans record which
    matches were found.
    """

    # Executed
    allocated_bol: AllocatedBOL  # the allocation being matched (always present)
    executed_bol: Optional[BOL] = None
    executed_bol_detail: Optional[BOLDetail] = None
    executed_drop: Optional[ExecutedDrop] = None

    # Planned
    planned_load: Optional[PlannedLoad] = None
    planned_load_detail: Optional[PlannedLoadDetail] = None
    planned_drop: Optional[PlannedDrop] = None
    planned_drop_detail: Optional[PlannedDropDetail] = None

    # Variance
    planned_quantity: Optional[int] = None
    actual_quantity: int
    # variance = actual - planned; variance_pct is relative to planned
    # (see calculate_volume_variance). None when no planned quantity exists.
    variance: Optional[int] = None
    variance_pct: Optional[float] = None

    # Match status
    matched_to_planned_drop: bool = False
    matched_to_planned_load: bool = False
    matched_to_executed_bol: bool = False
    matched_to_executed_drop: bool = False
+
|
|
46
|
+
|
|
47
|
+
def calculate_volume_variance(
    actual_qty: int,
    planned_qty: Optional[int],
) -> tuple[Optional[int], Optional[float]]:
    """
    Calculate variance between actual and planned quantities.

    Returns:
        tuple of (variance, variance_pct) - both None if planned_qty is None or zero
    """
    # No meaningful percentage without a positive planned baseline.
    if not planned_qty or planned_qty < 0:
        return None, None
    delta = actual_qty - planned_qty
    return delta, (delta / planned_qty) * 100
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def match_allocated_to_planned_drop(
    allocated_bol: AllocatedBOL,
    planned_drops: list[PlannedDrop],
) -> tuple[Optional[PlannedDrop], Optional[PlannedDropDetail]]:
    """
    Match an allocated BOL to a planned drop detail.

    Match keys:
    - allocated_bol.location_id == planned_drop.location_id
    - allocated_bol.store_product_id == planned_drop.detail.product_id
    - allocated_bol.store_tank == planned_drop.detail.tank_id
    """
    target_product = allocated_bol.store_product_id
    target_tank = allocated_bol.store_tank

    # Only drops at the allocation's location are candidates.
    same_location = (
        d for d in planned_drops if d.location_id == allocated_bol.location_id
    )
    for candidate in same_location:
        hit = next(
            (
                det
                for det in candidate.details
                if det.product_id == target_product and det.tank_id == target_tank
            ),
            None,
        )
        if hit is not None:
            return candidate, hit
    return None, None
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
def match_drop_to_load(
    drop_detail: Optional[PlannedDropDetail],
    planned_loads: list[PlannedLoad],
) -> tuple[Optional[PlannedLoad], Optional[PlannedLoadDetail]]:
    """
    Match a planned drop detail to a planned load detail using compartment_index.

    The bridge: drop_detail.sources[].compartment_index -> load_detail.compartment_index
    """
    if drop_detail is None:
        return None, None

    wanted = {source.compartment_index for source in drop_detail.sources}

    # First (load, detail) pair whose compartment appears in the drop's sources.
    pairs = (
        (load, detail)
        for load in planned_loads
        for detail in load.details
        if detail.compartment_index in wanted
    )
    return next(pairs, (None, None))
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
def match_allocated_to_executed_bol(
    allocated_bol: AllocatedBOL,
    executed_bols: list[BOL],
) -> tuple[Optional[BOL], Optional[BOLDetail]]:
    """
    Locate the executed BOL detail that corresponds to an allocated BOL.

    Match keys:
    - allocated_bol.bol_terminal_id == executed_bol.location_id
    - allocated_bol.bol_product_id == executed_bol.detail.product_id

    Returns the first (bol, detail) pair satisfying both keys, or
    (None, None) when nothing matches.
    """
    terminal = allocated_bol.bol_terminal_id
    product = allocated_bol.bol_product_id

    for candidate in executed_bols:
        if candidate.location_id != terminal:
            continue
        matching_detail = next(
            (line for line in candidate.details if line.product_id == product),
            None,
        )
        if matching_detail is not None:
            return candidate, matching_detail
    return None, None
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
def match_allocated_to_executed_drop(
    allocated_bol: AllocatedBOL,
    executed_drops: list[ExecutedDrop],
) -> Optional[ExecutedDrop]:
    """
    Locate the executed drop that corresponds to an allocated BOL.

    Match keys:
    - allocated_bol.location_id == executed_drop.location_id
    - allocated_bol.store_product_id == executed_drop.product_id
    - allocated_bol.store_tank == executed_drop.tank_id

    Returns the first matching drop, or None when nothing matches.
    """
    wanted = (
        allocated_bol.location_id,
        allocated_bol.store_product_id,
        allocated_bol.store_tank,
    )
    return next(
        (
            drop
            for drop in executed_drops
            if (drop.location_id, drop.product_id, drop.tank_id) == wanted
        ),
        None,
    )
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
def match_allocations(
    allocated_bols: list[dict],
    executed_bols: list[dict],
    executed_drops: list[dict],
    planned_loads: list[dict],
    planned_drops: list[dict],
) -> list[MatchedAllocation]:
    """
    Match allocated BOLs to planned and executed data.

    Each allocated BOL is matched independently; a failed match at any step
    leaves the corresponding fields None and sets the matched_* flag False.

    Args:
        allocated_bols: List of allocated_bols from bols_and_drops response
        executed_bols: List of bols from bols_and_drops response
        executed_drops: List of drops from bols_and_drops response
        planned_loads: List of loads from get_orders response
        planned_drops: List of drops from get_orders response

    Returns:
        List of MatchedAllocation results with variance calculations,
        one entry per allocated BOL, in input order.
    """
    # Parse raw dicts into Pydantic models up front so the matchers below
    # work with attribute access instead of dict lookups.
    parsed_allocated = [AllocatedBOL.model_validate(ab) for ab in allocated_bols]
    parsed_executed_bols = [BOL.model_validate(b) for b in executed_bols]
    parsed_executed_drops = [ExecutedDrop.model_validate(d) for d in executed_drops]
    # "load", not the ambiguous single-letter "l" (PEP 8 / E741).
    parsed_planned_loads = [PlannedLoad.model_validate(load) for load in planned_loads]
    parsed_planned_drops = [PlannedDrop.model_validate(d) for d in planned_drops]

    results: list[MatchedAllocation] = []

    for allocated_bol in parsed_allocated:
        # Step 1: Match to planned drop (location / store product / tank).
        planned_drop, planned_drop_detail = match_allocated_to_planned_drop(
            allocated_bol, parsed_planned_drops
        )

        # Step 2: Bridge the planned drop detail to its planned load via
        # compartment_index; a None detail short-circuits to (None, None).
        planned_load, planned_load_detail = match_drop_to_load(
            planned_drop_detail, parsed_planned_loads
        )

        # Step 3: Match to executed BOL (terminal / BOL product).
        executed_bol, executed_bol_detail = match_allocated_to_executed_bol(
            allocated_bol, parsed_executed_bols
        )

        # Step 4: Match to executed drop (location / store product / tank).
        executed_drop = match_allocated_to_executed_drop(
            allocated_bol, parsed_executed_drops
        )

        # Step 5: Variance between allocated (actual) and planned volume.
        actual_qty = allocated_bol.bol_gross_volume_allocated
        planned_qty = planned_drop_detail.quantity if planned_drop_detail else None
        variance, variance_pct = calculate_volume_variance(actual_qty, planned_qty)

        results.append(
            MatchedAllocation(
                allocated_bol=allocated_bol,
                executed_bol=executed_bol,
                executed_bol_detail=executed_bol_detail,
                executed_drop=executed_drop,
                planned_load=planned_load,
                planned_load_detail=planned_load_detail,
                planned_drop=planned_drop,
                planned_drop_detail=planned_drop_detail,
                planned_quantity=planned_qty,
                actual_quantity=actual_qty,
                variance=variance,
                variance_pct=variance_pct,
                matched_to_planned_drop=planned_drop is not None,
                matched_to_planned_load=planned_load is not None,
                matched_to_executed_bol=executed_bol is not None,
                matched_to_executed_drop=executed_drop is not None,
            )
        )

    return results
|