bb-integrations-library 3.0.11 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bb_integrations_lib/__init__.py +0 -0
- bb_integrations_lib/converters/__init__.py +0 -0
- bb_integrations_lib/gravitate/__init__.py +0 -0
- bb_integrations_lib/gravitate/base_api.py +20 -0
- bb_integrations_lib/gravitate/model.py +29 -0
- bb_integrations_lib/gravitate/pe_api.py +122 -0
- bb_integrations_lib/gravitate/rita_api.py +552 -0
- bb_integrations_lib/gravitate/sd_api.py +572 -0
- bb_integrations_lib/gravitate/testing/TTE/sd/models.py +1398 -0
- bb_integrations_lib/gravitate/testing/TTE/sd/tests/test_models.py +2987 -0
- bb_integrations_lib/gravitate/testing/__init__.py +0 -0
- bb_integrations_lib/gravitate/testing/builder.py +55 -0
- bb_integrations_lib/gravitate/testing/openapi.py +70 -0
- bb_integrations_lib/gravitate/testing/util.py +274 -0
- bb_integrations_lib/mappers/__init__.py +0 -0
- bb_integrations_lib/mappers/prices/__init__.py +0 -0
- bb_integrations_lib/mappers/prices/model.py +106 -0
- bb_integrations_lib/mappers/prices/price_mapper.py +127 -0
- bb_integrations_lib/mappers/prices/protocol.py +20 -0
- bb_integrations_lib/mappers/prices/util.py +61 -0
- bb_integrations_lib/mappers/rita_mapper.py +523 -0
- bb_integrations_lib/models/__init__.py +0 -0
- bb_integrations_lib/models/dtn_supplier_invoice.py +487 -0
- bb_integrations_lib/models/enums.py +28 -0
- bb_integrations_lib/models/pipeline_structs.py +76 -0
- bb_integrations_lib/models/probe/probe_event.py +20 -0
- bb_integrations_lib/models/probe/request_data.py +431 -0
- bb_integrations_lib/models/probe/resume_token.py +7 -0
- bb_integrations_lib/models/rita/audit.py +113 -0
- bb_integrations_lib/models/rita/auth.py +30 -0
- bb_integrations_lib/models/rita/bucket.py +17 -0
- bb_integrations_lib/models/rita/config.py +188 -0
- bb_integrations_lib/models/rita/constants.py +19 -0
- bb_integrations_lib/models/rita/crossroads_entities.py +293 -0
- bb_integrations_lib/models/rita/crossroads_mapping.py +428 -0
- bb_integrations_lib/models/rita/crossroads_monitoring.py +78 -0
- bb_integrations_lib/models/rita/crossroads_network.py +41 -0
- bb_integrations_lib/models/rita/crossroads_rules.py +80 -0
- bb_integrations_lib/models/rita/email.py +39 -0
- bb_integrations_lib/models/rita/issue.py +63 -0
- bb_integrations_lib/models/rita/mapping.py +227 -0
- bb_integrations_lib/models/rita/probe.py +58 -0
- bb_integrations_lib/models/rita/reference_data.py +110 -0
- bb_integrations_lib/models/rita/source_system.py +9 -0
- bb_integrations_lib/models/rita/workers.py +76 -0
- bb_integrations_lib/models/sd/bols_and_drops.py +241 -0
- bb_integrations_lib/models/sd/get_order.py +301 -0
- bb_integrations_lib/models/sd/orders.py +18 -0
- bb_integrations_lib/models/sd_api.py +115 -0
- bb_integrations_lib/pipelines/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/order_by_site_product_parser.py +50 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/tank_configs_parser.py +47 -0
- bb_integrations_lib/pipelines/parsers/dtn/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/dtn/dtn_price_parser.py +102 -0
- bb_integrations_lib/pipelines/parsers/dtn/model.py +79 -0
- bb_integrations_lib/pipelines/parsers/price_engine/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/price_engine/parse_accessorials_prices_parser.py +67 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_merge_parser.py +111 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_sync_parser.py +107 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/shared.py +81 -0
- bb_integrations_lib/pipelines/parsers/tank_reading_parser.py +155 -0
- bb_integrations_lib/pipelines/parsers/tank_sales_parser.py +144 -0
- bb_integrations_lib/pipelines/shared/__init__.py +0 -0
- bb_integrations_lib/pipelines/shared/allocation_matching.py +227 -0
- bb_integrations_lib/pipelines/shared/bol_allocation.py +2793 -0
- bb_integrations_lib/pipelines/steps/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/create_accessorials_step.py +80 -0
- bb_integrations_lib/pipelines/steps/distribution_report/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/distribution_report/distribution_report_datafram_to_raw_data.py +33 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_model_history_step.py +50 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_order_by_site_product_step.py +62 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_tank_configs_step.py +40 -0
- bb_integrations_lib/pipelines/steps/distribution_report/join_distribution_order_dos_step.py +85 -0
- bb_integrations_lib/pipelines/steps/distribution_report/upload_distribution_report_datafram_to_big_query.py +47 -0
- bb_integrations_lib/pipelines/steps/echo_step.py +14 -0
- bb_integrations_lib/pipelines/steps/export_dataframe_to_rawdata_step.py +28 -0
- bb_integrations_lib/pipelines/steps/exporting/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/exporting/bbd_export_payroll_file_step.py +107 -0
- bb_integrations_lib/pipelines/steps/exporting/bbd_export_readings_step.py +236 -0
- bb_integrations_lib/pipelines/steps/exporting/cargas_wholesale_bundle_upload_step.py +33 -0
- bb_integrations_lib/pipelines/steps/exporting/dataframe_flat_file_export.py +29 -0
- bb_integrations_lib/pipelines/steps/exporting/gcs_bucket_export_file_step.py +34 -0
- bb_integrations_lib/pipelines/steps/exporting/keyvu_export_step.py +356 -0
- bb_integrations_lib/pipelines/steps/exporting/pe_price_export_step.py +238 -0
- bb_integrations_lib/pipelines/steps/exporting/platform_science_order_sync_step.py +500 -0
- bb_integrations_lib/pipelines/steps/exporting/save_rawdata_to_disk.py +15 -0
- bb_integrations_lib/pipelines/steps/exporting/sftp_export_file_step.py +60 -0
- bb_integrations_lib/pipelines/steps/exporting/sftp_export_many_files_step.py +23 -0
- bb_integrations_lib/pipelines/steps/exporting/update_exported_orders_table_step.py +64 -0
- bb_integrations_lib/pipelines/steps/filter_step.py +22 -0
- bb_integrations_lib/pipelines/steps/get_latest_sync_date.py +34 -0
- bb_integrations_lib/pipelines/steps/importing/bbd_import_payroll_step.py +30 -0
- bb_integrations_lib/pipelines/steps/importing/get_order_numbers_to_export_step.py +138 -0
- bb_integrations_lib/pipelines/steps/importing/load_file_to_dataframe_step.py +46 -0
- bb_integrations_lib/pipelines/steps/importing/load_imap_attachment_step.py +172 -0
- bb_integrations_lib/pipelines/steps/importing/pe_bulk_sync_price_structure_step.py +68 -0
- bb_integrations_lib/pipelines/steps/importing/pe_price_merge_step.py +86 -0
- bb_integrations_lib/pipelines/steps/importing/sftp_file_config_step.py +124 -0
- bb_integrations_lib/pipelines/steps/importing/test_exact_file_match.py +57 -0
- bb_integrations_lib/pipelines/steps/null_step.py +15 -0
- bb_integrations_lib/pipelines/steps/pe_integration_job_step.py +32 -0
- bb_integrations_lib/pipelines/steps/processing/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/processing/archive_gcs_step.py +76 -0
- bb_integrations_lib/pipelines/steps/processing/archive_sftp_step.py +48 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_format_tank_readings_step.py +492 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_prices_step.py +54 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_tank_sales_step.py +124 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_tankreading_step.py +80 -0
- bb_integrations_lib/pipelines/steps/processing/convert_bbd_order_to_cargas_step.py +226 -0
- bb_integrations_lib/pipelines/steps/processing/delete_sftp_step.py +33 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/__init__.py +2 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/convert_dtn_invoice_to_sd_model.py +145 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/parse_dtn_invoice_step.py +38 -0
- bb_integrations_lib/pipelines/steps/processing/file_config_parser_step.py +720 -0
- bb_integrations_lib/pipelines/steps/processing/file_config_parser_step_v2.py +418 -0
- bb_integrations_lib/pipelines/steps/processing/get_sd_price_price_request.py +105 -0
- bb_integrations_lib/pipelines/steps/processing/keyvu_upload_deliveryplan_step.py +39 -0
- bb_integrations_lib/pipelines/steps/processing/mark_orders_exported_in_bbd_step.py +185 -0
- bb_integrations_lib/pipelines/steps/processing/pe_price_rows_processing_step.py +174 -0
- bb_integrations_lib/pipelines/steps/processing/send_process_report_step.py +47 -0
- bb_integrations_lib/pipelines/steps/processing/sftp_renamer_step.py +61 -0
- bb_integrations_lib/pipelines/steps/processing/tank_reading_touchup_steps.py +75 -0
- bb_integrations_lib/pipelines/steps/processing/upload_supplier_invoice_step.py +16 -0
- bb_integrations_lib/pipelines/steps/send_attached_in_rita_email_step.py +44 -0
- bb_integrations_lib/pipelines/steps/send_rita_email_step.py +34 -0
- bb_integrations_lib/pipelines/steps/sleep_step.py +24 -0
- bb_integrations_lib/pipelines/wrappers/__init__.py +0 -0
- bb_integrations_lib/pipelines/wrappers/accessorials_transformation.py +104 -0
- bb_integrations_lib/pipelines/wrappers/distribution_report.py +191 -0
- bb_integrations_lib/pipelines/wrappers/export_tank_readings.py +237 -0
- bb_integrations_lib/pipelines/wrappers/import_tank_readings.py +192 -0
- bb_integrations_lib/pipelines/wrappers/wrapper.py +81 -0
- bb_integrations_lib/protocols/__init__.py +0 -0
- bb_integrations_lib/protocols/flat_file.py +210 -0
- bb_integrations_lib/protocols/gravitate_client.py +104 -0
- bb_integrations_lib/protocols/pipelines.py +697 -0
- bb_integrations_lib/provider/__init__.py +0 -0
- bb_integrations_lib/provider/api/__init__.py +0 -0
- bb_integrations_lib/provider/api/cargas/__init__.py +0 -0
- bb_integrations_lib/provider/api/cargas/client.py +43 -0
- bb_integrations_lib/provider/api/cargas/model.py +49 -0
- bb_integrations_lib/provider/api/cargas/protocol.py +23 -0
- bb_integrations_lib/provider/api/dtn/__init__.py +0 -0
- bb_integrations_lib/provider/api/dtn/client.py +128 -0
- bb_integrations_lib/provider/api/dtn/protocol.py +9 -0
- bb_integrations_lib/provider/api/keyvu/__init__.py +0 -0
- bb_integrations_lib/provider/api/keyvu/client.py +30 -0
- bb_integrations_lib/provider/api/keyvu/model.py +149 -0
- bb_integrations_lib/provider/api/macropoint/__init__.py +0 -0
- bb_integrations_lib/provider/api/macropoint/client.py +28 -0
- bb_integrations_lib/provider/api/macropoint/model.py +40 -0
- bb_integrations_lib/provider/api/pc_miler/__init__.py +0 -0
- bb_integrations_lib/provider/api/pc_miler/client.py +130 -0
- bb_integrations_lib/provider/api/pc_miler/model.py +6 -0
- bb_integrations_lib/provider/api/pc_miler/web_services_apis.py +131 -0
- bb_integrations_lib/provider/api/platform_science/__init__.py +0 -0
- bb_integrations_lib/provider/api/platform_science/client.py +147 -0
- bb_integrations_lib/provider/api/platform_science/model.py +82 -0
- bb_integrations_lib/provider/api/quicktrip/__init__.py +0 -0
- bb_integrations_lib/provider/api/quicktrip/client.py +52 -0
- bb_integrations_lib/provider/api/telapoint/__init__.py +0 -0
- bb_integrations_lib/provider/api/telapoint/client.py +68 -0
- bb_integrations_lib/provider/api/telapoint/model.py +178 -0
- bb_integrations_lib/provider/api/warren_rogers/__init__.py +0 -0
- bb_integrations_lib/provider/api/warren_rogers/client.py +207 -0
- bb_integrations_lib/provider/aws/__init__.py +0 -0
- bb_integrations_lib/provider/aws/s3/__init__.py +0 -0
- bb_integrations_lib/provider/aws/s3/client.py +126 -0
- bb_integrations_lib/provider/ftp/__init__.py +0 -0
- bb_integrations_lib/provider/ftp/client.py +140 -0
- bb_integrations_lib/provider/ftp/interface.py +273 -0
- bb_integrations_lib/provider/ftp/model.py +76 -0
- bb_integrations_lib/provider/imap/__init__.py +0 -0
- bb_integrations_lib/provider/imap/client.py +228 -0
- bb_integrations_lib/provider/imap/model.py +3 -0
- bb_integrations_lib/provider/sqlserver/__init__.py +0 -0
- bb_integrations_lib/provider/sqlserver/client.py +106 -0
- bb_integrations_lib/secrets/__init__.py +4 -0
- bb_integrations_lib/secrets/adapters.py +98 -0
- bb_integrations_lib/secrets/credential_models.py +222 -0
- bb_integrations_lib/secrets/factory.py +85 -0
- bb_integrations_lib/secrets/providers.py +160 -0
- bb_integrations_lib/shared/__init__.py +0 -0
- bb_integrations_lib/shared/exceptions.py +25 -0
- bb_integrations_lib/shared/model.py +1039 -0
- bb_integrations_lib/shared/shared_enums.py +510 -0
- bb_integrations_lib/storage/README.md +236 -0
- bb_integrations_lib/storage/__init__.py +0 -0
- bb_integrations_lib/storage/aws/__init__.py +0 -0
- bb_integrations_lib/storage/aws/s3.py +8 -0
- bb_integrations_lib/storage/defaults.py +72 -0
- bb_integrations_lib/storage/gcs/__init__.py +0 -0
- bb_integrations_lib/storage/gcs/client.py +8 -0
- bb_integrations_lib/storage/gcsmanager/__init__.py +0 -0
- bb_integrations_lib/storage/gcsmanager/client.py +8 -0
- bb_integrations_lib/storage/setup.py +29 -0
- bb_integrations_lib/util/__init__.py +0 -0
- bb_integrations_lib/util/cache/__init__.py +0 -0
- bb_integrations_lib/util/cache/custom_ttl_cache.py +75 -0
- bb_integrations_lib/util/cache/protocol.py +9 -0
- bb_integrations_lib/util/config/__init__.py +0 -0
- bb_integrations_lib/util/config/manager.py +391 -0
- bb_integrations_lib/util/config/model.py +41 -0
- bb_integrations_lib/util/exception_logger/__init__.py +0 -0
- bb_integrations_lib/util/exception_logger/exception_logger.py +146 -0
- bb_integrations_lib/util/exception_logger/test.py +114 -0
- bb_integrations_lib/util/utils.py +364 -0
- bb_integrations_lib/workers/__init__.py +0 -0
- bb_integrations_lib/workers/groups.py +13 -0
- bb_integrations_lib/workers/rpc_worker.py +50 -0
- bb_integrations_lib/workers/topics.py +20 -0
- bb_integrations_library-3.0.11.dist-info/METADATA +59 -0
- bb_integrations_library-3.0.11.dist-info/RECORD +217 -0
- bb_integrations_library-3.0.11.dist-info/WHEEL +4 -0
bb_integrations_lib/pipelines/steps/exporting/bbd_export_readings_step.py
@@ -0,0 +1,236 @@
import asyncio
from datetime import datetime, UTC, timedelta
from typing import Dict, Any, Optional, Tuple, Iterable
from zoneinfo import ZoneInfo

import pandas as pd
from bson import RawBSONDocument
from loguru import logger
from pandas import DataFrame
from pymongo import MongoClient
from pymongo.synchronous.database import Database, Collection

from bb_integrations_lib.gravitate.rita_api import GravitateRitaAPI
from bb_integrations_lib.gravitate.sd_api import GravitateSDAPI
from bb_integrations_lib.protocols.pipelines import Step
from bb_integrations_lib.shared.model import ReadingQuery, ExportReadingsWindowMode


class BBDExportReadingsStep(Step):
    def __init__(self, rita_client: GravitateRitaAPI, sd_client: GravitateSDAPI, ims_database: Database,
                 reading_query: ReadingQuery, timezone: str = "UTC",
                 window_mode: ExportReadingsWindowMode = ExportReadingsWindowMode.HOURS_BACK, hours_back: float = 6,
                 batch_hours: float = 2, big_dataset: int = 750, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.rita_client = rita_client
        self.sd_client = sd_client
        self.ims_database = ims_database

        self.reading_query = reading_query
        self.timezone = timezone
        self.window_mode = window_mode
        self.hours_back = hours_back
        self.batch_hours = batch_hours
        self.big_dataset = big_dataset

    def describe(self) -> str:
        return "Export tank readings from BBD with a customizable query"

    async def execute(self, _: Any) -> Tuple[Dict, Dict, Iterable]:
        # Compute start/end windows based on the provided configuration
        if self.window_mode == ExportReadingsWindowMode.HOURS_BACK or self.window_mode == ExportReadingsWindowMode.LATEST_ONLY:
            now = datetime.now(UTC)
            window_start = now - timedelta(hours=self.hours_back)
            window_end = now
        elif self.window_mode == ExportReadingsWindowMode.PREVIOUS_DAY:
            window_end = datetime.now(ZoneInfo(self.timezone)).replace(hour=0, minute=0, second=0, microsecond=0)
            window_start = window_end - timedelta(days=1)
        else:
            raise Exception(f"Unknown window mode {self.window_mode}")

        if self.window_mode == ExportReadingsWindowMode.HOURS_BACK:
            logger.info(
                f"Exporting tank readings from BBD for the last {self.hours_back} hours "
                f"(from {window_start} to {window_end})"
            )
        elif self.window_mode == ExportReadingsWindowMode.LATEST_ONLY:
            logger.info(
                f"Exporting ONLY the most recent tank reading from BBD for the last {self.hours_back} hours "
                f"(from {window_start} to {window_end})"
            )
        elif self.window_mode == ExportReadingsWindowMode.PREVIOUS_DAY:
            logger.info(f"Exporting tank readings from BBD for the previous day (from {window_start} to {window_end})")

        store_lkp, tank_lkp, filtered_stores = await self.get_sites_and_tank_ids_to_export()

        if len(filtered_stores) > self.big_dataset and self.reading_query.by_wildcard is None:
            raise ValueError(
                f"Large dataset ({len(filtered_stores)} stores) detected. "
                f"Must use wildcard query strategy (by_wildcard) for datasets > {self.big_dataset} stores."
            )

        use_or_filter = len(filtered_stores) <= self.big_dataset

        return await self.get_inventory(
            window_start=window_start,
            window_end=window_end,
            latest_only=self.window_mode == ExportReadingsWindowMode.LATEST_ONLY,
            store_lkp=store_lkp,
            tank_lkp=tank_lkp,
            filtered_stores=filtered_stores,
            use_or_filter=use_or_filter,
        )

    @staticmethod
    def collection_as_raw_bson(collection: Collection):
        return collection.with_options(
            codec_options=collection.codec_options.with_options(document_class=RawBSONDocument))

    async def get_sites_and_tank_ids_to_export(self, query: Optional[ReadingQuery] = None) -> Tuple[
            Dict, Dict, DataFrame]:
        query_to_use = query if query else self.reading_query
        stores = (await self.sd_client.get_all_stores(include_tanks=True)).json()
        filtered_stores = self.apply_reading_query_filter(stores=stores, query=query_to_use)
        filtered_store_numbers = filtered_stores["store_number"].to_list()
        store_lkp, tank_lkp = BBDExportReadingsStep.lkps(
            [store for store in stores if store["store_number"] in filtered_store_numbers])
        return store_lkp, tank_lkp, filtered_stores

    def apply_reading_query_filter(self, stores: list[Dict], query: Optional[ReadingQuery] = None) -> DataFrame:
        query_to_use = query if query else self.reading_query
        df = pd.DataFrame(stores)
        tanks_df = df.explode("tanks")
        tanks_df = tanks_df[~tanks_df["tanks"].isna()].reset_index()
        tanks_df["tank_id"] = tanks_df["tanks"].apply(lambda x: x["tank_id"])
        tanks_df["composite_key"] = tanks_df["store_number"] + ":" + tanks_df["tank_id"].astype(str)
        if query_to_use.by_wildcard is not None and query_to_use.by_wildcard == "*":
            return tanks_df[["store_number", "tank_id", "composite_key"]]
        mask = query_to_use.as_mask(tanks_df)
        filtered = tanks_df[mask]
        return filtered[["store_number", "tank_id", "composite_key"]]

    @staticmethod
    def ims_query_pairs(filtered_stores: DataFrame) -> list:
        pairs = []
        for _, row in filtered_stores.iterrows():
            pairs.append({
                "store_number": row["store_number"],
                "tank_id": str(row["tank_id"])
            })
        return pairs

    async def get_inventory(self,
                            window_start: datetime,
                            window_end: datetime,
                            latest_only: bool,
                            store_lkp: dict,
                            tank_lkp: dict,
                            filtered_stores: DataFrame | None = None,
                            use_or_filter: bool = True,
                            ) -> Tuple[Dict, Dict, Iterable]:
        """
        Get inventory / tank readings, filtering to the stores and tanks specified by reading_query.
        :param window_start: Include readings newer than or equal to this datetime.
        :param window_end: Include readings older than or equal to this datetime.
        :param latest_only: Whether only the latest reading within the window should be provided.
        :param store_lkp: Pre-fetched store lookup dict.
        :param tank_lkp: Pre-fetched tank lookup dict.
        :param filtered_stores: Optional pre-fetched filtered stores DataFrame.
        :param use_or_filter: Whether to include a $or clause in the aggregation for store/tank filtering.
        :return: A tuple containing a store lookup dict, tank lookup dict, and iterable of tank reading documents.
        """
        ims_pairs = BBDExportReadingsStep.ims_query_pairs(filtered_stores)
        collection = self.ims_database['tank_inventory_log']

        match_conditions: list[dict] = [
            {
                "read_time": {
                    "$gte": window_start,
                    "$lte": window_end
                }
            }
        ]

        if use_or_filter:
            match_conditions.append({
                "$or": ims_pairs
            })

        _query: list[dict] = [
            {
                "$match": {
                    "$and": match_conditions
                }
            },
            {
                "$sort": {
                    "read_time": 1
                }
            },
            {
                "$project": {
                    "tank_agent_name": 1,
                    "store_number": 1,
                    "run_time": 1,
                    "tank_id": 1,
                    "read_time": 1,
                    "product": 1,
                    "monitor_type": 1,
                    "volume": 1,
                }
            }
        ]
        # If we're getting only the most recent tank reading, aggregate by store and tank, then grab the latest
        # reading document from the lookback period.
        if latest_only:
            _query.append({
                "$group": {
                    "_id": {
                        "store_number": "$store_number",
                        "tank_id": "$tank_id"
                    },
                    "documents": {
                        "$last": "$$ROOT"
                    }
                }
            })

        cursor = collection.aggregate(_query)
        # Have to extract the latest document from the aggregate if we're grabbing the latest, as the query result
        # set is shaped differently.
        if latest_only:
            return store_lkp, tank_lkp, (r["documents"] for r in cursor)
        return store_lkp, tank_lkp, cursor

    @staticmethod
    def lkps(stores: Iterable) -> Tuple[Dict, Dict]:
        store_lkp = {}
        tank_lkp = {}
        for store in stores:
            store_number = store['store_number']
            tanks = store['tanks']
            for tank in tanks:
                tank_id = tank['tank_id']
                store_lkp[f"{store_number}"] = store
                tank_lkp[f"{store_number}:{tank_id}"] = tank
        return store_lkp, tank_lkp


if __name__ == "__main__":
    export = BBDExportReadingsStep(
        rita_client=GravitateRitaAPI(
            base_url="",
            client_id="",
            client_secret=""
        ),
        sd_client=GravitateSDAPI(
            base_url="",
            client_id="",
            client_secret=""
        ),
        ims_database=MongoClient("mongodb conn str")["db_name"],
        reading_query=ReadingQuery(
            by_store_numbers=["100101"]
        )
    )
    readings = asyncio.run(export.execute("Majors"))
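
When latest_only is set, the step relies on a standard MongoDB pattern: sort ascending by read_time, then $group per (store_number, tank_id) and keep the $last document of each group. Below is a minimal standalone sketch of that aggregation shape; it is not part of the package and assumes a disposable MongoDB instance reachable at localhost:27017.

# Sketch only: reproduces the latest-only aggregation shape with throwaway data.
from datetime import datetime, UTC, timedelta
from pymongo import MongoClient

coll = MongoClient("mongodb://localhost:27017")["scratch"]["tank_inventory_log"]
coll.delete_many({})
now = datetime.now(UTC)
coll.insert_many([
    {"store_number": "100101", "tank_id": 1, "read_time": now - timedelta(hours=3), "volume": 4200},
    {"store_number": "100101", "tank_id": 1, "read_time": now - timedelta(hours=1), "volume": 3900},
])
pipeline = [
    {"$match": {"read_time": {"$gte": now - timedelta(hours=6), "$lte": now}}},
    {"$sort": {"read_time": 1}},  # oldest-to-newest, so $last is the newest reading
    {"$group": {"_id": {"store_number": "$store_number", "tank_id": "$tank_id"},
                "documents": {"$last": "$$ROOT"}}},  # one document per (store, tank)
]
latest = [r["documents"] for r in coll.aggregate(pipeline)]
print(latest)  # a single reading: the hour-old, volume-3900 document
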
bb_integrations_lib/pipelines/steps/exporting/cargas_wholesale_bundle_upload_step.py
@@ -0,0 +1,33 @@
from typing import Any

from bb_integrations_lib.protocols.pipelines import Step, Input, StepConfig
from loguru import logger

from bb_integrations_lib.provider.api.cargas.client import CargasClient
from bb_integrations_lib.provider.api.cargas.model import CreateWholesaleTicketRequest, \
    CreateWholesaleTicketRequestBundle


class CargasWholesaleBundleUploadStep(Step):
    def __init__(self, cargas_client: CargasClient, *args, **kwargs):
        super().__init__(*args, **kwargs)

        self.cargas_client = cargas_client

    def describe(self):
        return "Upload a Cargas Wholesale ticket request bundle"

    async def execute(self, i: CreateWholesaleTicketRequestBundle) -> None:
        cwt_resp = await self.cargas_client.create_wholesale_ticket(i.ticket_request)
        doc_id = cwt_resp["ResponseValues"]["DocumentID"]

        # Now try to upload each line
        for index, line in enumerate(i.line_requests):
            try:
                line.DocumentID = int(doc_id)
                await self.cargas_client.create_wholesale_line(line)
            except Exception as e:
                logger.error(f"Failed to upload line {index} to Cargas: {e}")
        print(cwt_resp)

        # TODO: Upload fee line items
bb_integrations_lib/pipelines/steps/exporting/dataframe_flat_file_export.py
@@ -0,0 +1,29 @@
from datetime import datetime, UTC

from pandas import DataFrame

from bb_integrations_lib.protocols.pipelines import Step
from bb_integrations_lib.shared.model import FileReference, FileType


class DataFrameFlatFileExportStep(Step):
    def __init__(self, file_path: str, file_type: FileType, *args, **kwargs):
        super().__init__(*args, **kwargs)

        file_path = file_path
        if "{date}" in file_path:
            file_path = file_path.replace("{date}", datetime.now(UTC).strftime("%Y%m%d"))

        self.output = FileReference(file_path, file_type)

    def describe(self) -> str:
        return f"Exporting DataFrame to flat file {self.output.file_path}"

    async def execute(self, i: DataFrame) -> FileReference:
        if self.output.file_type == FileType.excel:
            i.to_excel(self.output.file_path, index=False)
        elif self.output.file_type == FileType.csv:
            i.to_csv(self.output.file_path, index=False)
        else:
            raise NotImplementedError(f"Unsupported file type: {self.output.file_type}")
        return self.output
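
A usage sketch for the step above (not part of the package): it assumes the Step base class needs no extra constructor arguments, mirroring the __main__ example in bbd_export_readings_step.py, and uses the FileType.csv member referenced in the code.

# Sketch only: write a small DataFrame to a dated CSV via DataFrameFlatFileExportStep.
import asyncio
import pandas as pd
from bb_integrations_lib.pipelines.steps.exporting.dataframe_flat_file_export import DataFrameFlatFileExportStep
from bb_integrations_lib.shared.model import FileType

df = pd.DataFrame({"store_number": ["100101"], "volume": [4200]})
# "{date}" is replaced with the current UTC date (YYYYMMDD) in the step's constructor.
step = DataFrameFlatFileExportStep(file_path="readings_{date}.csv", file_type=FileType.csv)
file_ref = asyncio.run(step.execute(df))
print(file_ref.file_path)  # e.g. readings_20250101.csv
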
bb_integrations_lib/pipelines/steps/exporting/gcs_bucket_export_file_step.py
@@ -0,0 +1,34 @@
import os
from typing import Any, Dict

from bb_integrations_lib.shared.model import FileReference, File
from bb_integrations_lib.protocols.pipelines import Step
from bb_integrations_lib.provider.gcp.cloud_storage.client import CloudStorageClient


class GCSExportFileStep(Step[Any, Any, None]):
    def __init__(self, step_configuration: Dict[str, str]) -> None:
        super().__init__(step_configuration)
        self.gcs_client = CloudStorageClient()
        self.bucket = step_configuration['bucket']

    def describe(self) -> str:
        return "Exporting file to GCS bucket"

    async def execute(self, i: FileReference) -> FileReference:
        file_name = os.path.basename(i.file_path)
        with open(i.file_path, "rb") as f:
            file_data = f.read()
        file = File(
            file_name=file_name,
            file_data=file_data
        )
        try:
            self.gcs_client.upload_file(file, self.bucket)
            return i
        except FileExistsError:
            # If run twice and the file was already archived, we don't need to archive another copy.
            return i
        except Exception as e:
            raise e
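
A companion sketch for GCSExportFileStep (not part of the package): it assumes Google application-default credentials are available to CloudStorageClient, the bucket name is illustrative, and FileReference is constructed positionally as dataframe_flat_file_export.py does.

# Sketch only: push a locally written file to a GCS bucket via GCSExportFileStep.
import asyncio
from bb_integrations_lib.pipelines.steps.exporting.gcs_bucket_export_file_step import GCSExportFileStep
from bb_integrations_lib.shared.model import FileReference, FileType

step = GCSExportFileStep(step_configuration={"bucket": "example-archive-bucket"})  # hypothetical bucket name
file_ref = FileReference("readings_20250101.csv", FileType.csv)  # file must already exist locally
asyncio.run(step.execute(file_ref))
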