bb-integrations-library 3.0.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bb_integrations_lib/__init__.py +0 -0
- bb_integrations_lib/converters/__init__.py +0 -0
- bb_integrations_lib/gravitate/__init__.py +0 -0
- bb_integrations_lib/gravitate/base_api.py +20 -0
- bb_integrations_lib/gravitate/model.py +29 -0
- bb_integrations_lib/gravitate/pe_api.py +122 -0
- bb_integrations_lib/gravitate/rita_api.py +552 -0
- bb_integrations_lib/gravitate/sd_api.py +572 -0
- bb_integrations_lib/gravitate/testing/TTE/sd/models.py +1398 -0
- bb_integrations_lib/gravitate/testing/TTE/sd/tests/test_models.py +2987 -0
- bb_integrations_lib/gravitate/testing/__init__.py +0 -0
- bb_integrations_lib/gravitate/testing/builder.py +55 -0
- bb_integrations_lib/gravitate/testing/openapi.py +70 -0
- bb_integrations_lib/gravitate/testing/util.py +274 -0
- bb_integrations_lib/mappers/__init__.py +0 -0
- bb_integrations_lib/mappers/prices/__init__.py +0 -0
- bb_integrations_lib/mappers/prices/model.py +106 -0
- bb_integrations_lib/mappers/prices/price_mapper.py +127 -0
- bb_integrations_lib/mappers/prices/protocol.py +20 -0
- bb_integrations_lib/mappers/prices/util.py +61 -0
- bb_integrations_lib/mappers/rita_mapper.py +523 -0
- bb_integrations_lib/models/__init__.py +0 -0
- bb_integrations_lib/models/dtn_supplier_invoice.py +487 -0
- bb_integrations_lib/models/enums.py +28 -0
- bb_integrations_lib/models/pipeline_structs.py +76 -0
- bb_integrations_lib/models/probe/probe_event.py +20 -0
- bb_integrations_lib/models/probe/request_data.py +431 -0
- bb_integrations_lib/models/probe/resume_token.py +7 -0
- bb_integrations_lib/models/rita/audit.py +113 -0
- bb_integrations_lib/models/rita/auth.py +30 -0
- bb_integrations_lib/models/rita/bucket.py +17 -0
- bb_integrations_lib/models/rita/config.py +188 -0
- bb_integrations_lib/models/rita/constants.py +19 -0
- bb_integrations_lib/models/rita/crossroads_entities.py +293 -0
- bb_integrations_lib/models/rita/crossroads_mapping.py +428 -0
- bb_integrations_lib/models/rita/crossroads_monitoring.py +78 -0
- bb_integrations_lib/models/rita/crossroads_network.py +41 -0
- bb_integrations_lib/models/rita/crossroads_rules.py +80 -0
- bb_integrations_lib/models/rita/email.py +39 -0
- bb_integrations_lib/models/rita/issue.py +63 -0
- bb_integrations_lib/models/rita/mapping.py +227 -0
- bb_integrations_lib/models/rita/probe.py +58 -0
- bb_integrations_lib/models/rita/reference_data.py +110 -0
- bb_integrations_lib/models/rita/source_system.py +9 -0
- bb_integrations_lib/models/rita/workers.py +76 -0
- bb_integrations_lib/models/sd/bols_and_drops.py +241 -0
- bb_integrations_lib/models/sd/get_order.py +301 -0
- bb_integrations_lib/models/sd/orders.py +18 -0
- bb_integrations_lib/models/sd_api.py +115 -0
- bb_integrations_lib/pipelines/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/order_by_site_product_parser.py +50 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/tank_configs_parser.py +47 -0
- bb_integrations_lib/pipelines/parsers/dtn/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/dtn/dtn_price_parser.py +102 -0
- bb_integrations_lib/pipelines/parsers/dtn/model.py +79 -0
- bb_integrations_lib/pipelines/parsers/price_engine/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/price_engine/parse_accessorials_prices_parser.py +67 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_merge_parser.py +111 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_sync_parser.py +107 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/shared.py +81 -0
- bb_integrations_lib/pipelines/parsers/tank_reading_parser.py +155 -0
- bb_integrations_lib/pipelines/parsers/tank_sales_parser.py +144 -0
- bb_integrations_lib/pipelines/shared/__init__.py +0 -0
- bb_integrations_lib/pipelines/shared/allocation_matching.py +227 -0
- bb_integrations_lib/pipelines/shared/bol_allocation.py +2793 -0
- bb_integrations_lib/pipelines/steps/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/create_accessorials_step.py +80 -0
- bb_integrations_lib/pipelines/steps/distribution_report/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/distribution_report/distribution_report_datafram_to_raw_data.py +33 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_model_history_step.py +50 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_order_by_site_product_step.py +62 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_tank_configs_step.py +40 -0
- bb_integrations_lib/pipelines/steps/distribution_report/join_distribution_order_dos_step.py +85 -0
- bb_integrations_lib/pipelines/steps/distribution_report/upload_distribution_report_datafram_to_big_query.py +47 -0
- bb_integrations_lib/pipelines/steps/echo_step.py +14 -0
- bb_integrations_lib/pipelines/steps/export_dataframe_to_rawdata_step.py +28 -0
- bb_integrations_lib/pipelines/steps/exporting/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/exporting/bbd_export_payroll_file_step.py +107 -0
- bb_integrations_lib/pipelines/steps/exporting/bbd_export_readings_step.py +236 -0
- bb_integrations_lib/pipelines/steps/exporting/cargas_wholesale_bundle_upload_step.py +33 -0
- bb_integrations_lib/pipelines/steps/exporting/dataframe_flat_file_export.py +29 -0
- bb_integrations_lib/pipelines/steps/exporting/gcs_bucket_export_file_step.py +34 -0
- bb_integrations_lib/pipelines/steps/exporting/keyvu_export_step.py +356 -0
- bb_integrations_lib/pipelines/steps/exporting/pe_price_export_step.py +238 -0
- bb_integrations_lib/pipelines/steps/exporting/platform_science_order_sync_step.py +500 -0
- bb_integrations_lib/pipelines/steps/exporting/save_rawdata_to_disk.py +15 -0
- bb_integrations_lib/pipelines/steps/exporting/sftp_export_file_step.py +60 -0
- bb_integrations_lib/pipelines/steps/exporting/sftp_export_many_files_step.py +23 -0
- bb_integrations_lib/pipelines/steps/exporting/update_exported_orders_table_step.py +64 -0
- bb_integrations_lib/pipelines/steps/filter_step.py +22 -0
- bb_integrations_lib/pipelines/steps/get_latest_sync_date.py +34 -0
- bb_integrations_lib/pipelines/steps/importing/bbd_import_payroll_step.py +30 -0
- bb_integrations_lib/pipelines/steps/importing/get_order_numbers_to_export_step.py +138 -0
- bb_integrations_lib/pipelines/steps/importing/load_file_to_dataframe_step.py +46 -0
- bb_integrations_lib/pipelines/steps/importing/load_imap_attachment_step.py +172 -0
- bb_integrations_lib/pipelines/steps/importing/pe_bulk_sync_price_structure_step.py +68 -0
- bb_integrations_lib/pipelines/steps/importing/pe_price_merge_step.py +86 -0
- bb_integrations_lib/pipelines/steps/importing/sftp_file_config_step.py +124 -0
- bb_integrations_lib/pipelines/steps/importing/test_exact_file_match.py +57 -0
- bb_integrations_lib/pipelines/steps/null_step.py +15 -0
- bb_integrations_lib/pipelines/steps/pe_integration_job_step.py +32 -0
- bb_integrations_lib/pipelines/steps/processing/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/processing/archive_gcs_step.py +76 -0
- bb_integrations_lib/pipelines/steps/processing/archive_sftp_step.py +48 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_format_tank_readings_step.py +492 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_prices_step.py +54 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_tank_sales_step.py +124 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_tankreading_step.py +80 -0
- bb_integrations_lib/pipelines/steps/processing/convert_bbd_order_to_cargas_step.py +226 -0
- bb_integrations_lib/pipelines/steps/processing/delete_sftp_step.py +33 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/__init__.py +2 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/convert_dtn_invoice_to_sd_model.py +145 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/parse_dtn_invoice_step.py +38 -0
- bb_integrations_lib/pipelines/steps/processing/file_config_parser_step.py +720 -0
- bb_integrations_lib/pipelines/steps/processing/file_config_parser_step_v2.py +418 -0
- bb_integrations_lib/pipelines/steps/processing/get_sd_price_price_request.py +105 -0
- bb_integrations_lib/pipelines/steps/processing/keyvu_upload_deliveryplan_step.py +39 -0
- bb_integrations_lib/pipelines/steps/processing/mark_orders_exported_in_bbd_step.py +185 -0
- bb_integrations_lib/pipelines/steps/processing/pe_price_rows_processing_step.py +174 -0
- bb_integrations_lib/pipelines/steps/processing/send_process_report_step.py +47 -0
- bb_integrations_lib/pipelines/steps/processing/sftp_renamer_step.py +61 -0
- bb_integrations_lib/pipelines/steps/processing/tank_reading_touchup_steps.py +75 -0
- bb_integrations_lib/pipelines/steps/processing/upload_supplier_invoice_step.py +16 -0
- bb_integrations_lib/pipelines/steps/send_attached_in_rita_email_step.py +44 -0
- bb_integrations_lib/pipelines/steps/send_rita_email_step.py +34 -0
- bb_integrations_lib/pipelines/steps/sleep_step.py +24 -0
- bb_integrations_lib/pipelines/wrappers/__init__.py +0 -0
- bb_integrations_lib/pipelines/wrappers/accessorials_transformation.py +104 -0
- bb_integrations_lib/pipelines/wrappers/distribution_report.py +191 -0
- bb_integrations_lib/pipelines/wrappers/export_tank_readings.py +237 -0
- bb_integrations_lib/pipelines/wrappers/import_tank_readings.py +192 -0
- bb_integrations_lib/pipelines/wrappers/wrapper.py +81 -0
- bb_integrations_lib/protocols/__init__.py +0 -0
- bb_integrations_lib/protocols/flat_file.py +210 -0
- bb_integrations_lib/protocols/gravitate_client.py +104 -0
- bb_integrations_lib/protocols/pipelines.py +697 -0
- bb_integrations_lib/provider/__init__.py +0 -0
- bb_integrations_lib/provider/api/__init__.py +0 -0
- bb_integrations_lib/provider/api/cargas/__init__.py +0 -0
- bb_integrations_lib/provider/api/cargas/client.py +43 -0
- bb_integrations_lib/provider/api/cargas/model.py +49 -0
- bb_integrations_lib/provider/api/cargas/protocol.py +23 -0
- bb_integrations_lib/provider/api/dtn/__init__.py +0 -0
- bb_integrations_lib/provider/api/dtn/client.py +128 -0
- bb_integrations_lib/provider/api/dtn/protocol.py +9 -0
- bb_integrations_lib/provider/api/keyvu/__init__.py +0 -0
- bb_integrations_lib/provider/api/keyvu/client.py +30 -0
- bb_integrations_lib/provider/api/keyvu/model.py +149 -0
- bb_integrations_lib/provider/api/macropoint/__init__.py +0 -0
- bb_integrations_lib/provider/api/macropoint/client.py +28 -0
- bb_integrations_lib/provider/api/macropoint/model.py +40 -0
- bb_integrations_lib/provider/api/pc_miler/__init__.py +0 -0
- bb_integrations_lib/provider/api/pc_miler/client.py +130 -0
- bb_integrations_lib/provider/api/pc_miler/model.py +6 -0
- bb_integrations_lib/provider/api/pc_miler/web_services_apis.py +131 -0
- bb_integrations_lib/provider/api/platform_science/__init__.py +0 -0
- bb_integrations_lib/provider/api/platform_science/client.py +147 -0
- bb_integrations_lib/provider/api/platform_science/model.py +82 -0
- bb_integrations_lib/provider/api/quicktrip/__init__.py +0 -0
- bb_integrations_lib/provider/api/quicktrip/client.py +52 -0
- bb_integrations_lib/provider/api/telapoint/__init__.py +0 -0
- bb_integrations_lib/provider/api/telapoint/client.py +68 -0
- bb_integrations_lib/provider/api/telapoint/model.py +178 -0
- bb_integrations_lib/provider/api/warren_rogers/__init__.py +0 -0
- bb_integrations_lib/provider/api/warren_rogers/client.py +207 -0
- bb_integrations_lib/provider/aws/__init__.py +0 -0
- bb_integrations_lib/provider/aws/s3/__init__.py +0 -0
- bb_integrations_lib/provider/aws/s3/client.py +126 -0
- bb_integrations_lib/provider/ftp/__init__.py +0 -0
- bb_integrations_lib/provider/ftp/client.py +140 -0
- bb_integrations_lib/provider/ftp/interface.py +273 -0
- bb_integrations_lib/provider/ftp/model.py +76 -0
- bb_integrations_lib/provider/imap/__init__.py +0 -0
- bb_integrations_lib/provider/imap/client.py +228 -0
- bb_integrations_lib/provider/imap/model.py +3 -0
- bb_integrations_lib/provider/sqlserver/__init__.py +0 -0
- bb_integrations_lib/provider/sqlserver/client.py +106 -0
- bb_integrations_lib/secrets/__init__.py +4 -0
- bb_integrations_lib/secrets/adapters.py +98 -0
- bb_integrations_lib/secrets/credential_models.py +222 -0
- bb_integrations_lib/secrets/factory.py +85 -0
- bb_integrations_lib/secrets/providers.py +160 -0
- bb_integrations_lib/shared/__init__.py +0 -0
- bb_integrations_lib/shared/exceptions.py +25 -0
- bb_integrations_lib/shared/model.py +1039 -0
- bb_integrations_lib/shared/shared_enums.py +510 -0
- bb_integrations_lib/storage/README.md +236 -0
- bb_integrations_lib/storage/__init__.py +0 -0
- bb_integrations_lib/storage/aws/__init__.py +0 -0
- bb_integrations_lib/storage/aws/s3.py +8 -0
- bb_integrations_lib/storage/defaults.py +72 -0
- bb_integrations_lib/storage/gcs/__init__.py +0 -0
- bb_integrations_lib/storage/gcs/client.py +8 -0
- bb_integrations_lib/storage/gcsmanager/__init__.py +0 -0
- bb_integrations_lib/storage/gcsmanager/client.py +8 -0
- bb_integrations_lib/storage/setup.py +29 -0
- bb_integrations_lib/util/__init__.py +0 -0
- bb_integrations_lib/util/cache/__init__.py +0 -0
- bb_integrations_lib/util/cache/custom_ttl_cache.py +75 -0
- bb_integrations_lib/util/cache/protocol.py +9 -0
- bb_integrations_lib/util/config/__init__.py +0 -0
- bb_integrations_lib/util/config/manager.py +391 -0
- bb_integrations_lib/util/config/model.py +41 -0
- bb_integrations_lib/util/exception_logger/__init__.py +0 -0
- bb_integrations_lib/util/exception_logger/exception_logger.py +146 -0
- bb_integrations_lib/util/exception_logger/test.py +114 -0
- bb_integrations_lib/util/utils.py +364 -0
- bb_integrations_lib/workers/__init__.py +0 -0
- bb_integrations_lib/workers/groups.py +13 -0
- bb_integrations_lib/workers/rpc_worker.py +50 -0
- bb_integrations_lib/workers/topics.py +20 -0
- bb_integrations_library-3.0.11.dist-info/METADATA +59 -0
- bb_integrations_library-3.0.11.dist-info/RECORD +217 -0
- bb_integrations_library-3.0.11.dist-info/WHEEL +4 -0
@@ -0,0 +1,356 @@
from datetime import datetime, timedelta, UTC

from bson import ObjectId
from loguru import logger
from pymongo import MongoClient, AsyncMongoClient
from pymongo.asynchronous.database import AsyncDatabase
from pymongo.synchronous.database import Database

from bb_integrations_lib.gravitate.rita_api import GravitateRitaAPI
from bb_integrations_lib.gravitate.sd_api import GravitateSDAPI
from bb_integrations_lib.models.rita.config import GenericConfig
from bb_integrations_lib.protocols.pipelines import Step
from bb_integrations_lib.provider.api.keyvu.model import Delivery, StationDelivery, \
    DeliveryStatus, KeyVuDeliveryPlan, default_serialization_options, StationDeliveryDetails, StationDeliveryBOL, \
    GeoLocation
from bb_integrations_lib.shared.model import RawData

# See also v1 order trip status for better DeliveryStatus mapping
delivery_status_map = {
    "accepted": DeliveryStatus.planned,
    "assigned": DeliveryStatus.planned,
    "in progress": DeliveryStatus.unloading,
    "complete": DeliveryStatus.delivered,
    "canceled": DeliveryStatus.canceled
}


class KeyVuExportStep(Step):
    def __init__(self, rita_client: GravitateRitaAPI, sd_client: GravitateSDAPI, mongo_database: AsyncDatabase,
                 time_back: timedelta, only_mapped_sites: bool = True, by_counterparties: list[str] | None = None,
                 order_nums: list[int] | None = None, *args, **kwargs):
        """
        Build a KeyVu delivery plan from recently modified orders in the S&D environment.

        Automatically uses RITA mappings with a KeyVu source system.

        :param rita_client: RITA client to retrieve mappings with.
        :param sd_client: S&D client to retrieve orders from.
        :param mongo_database: An initialized MongoDB database to retrieve order details from.
        :param time_back: Gets orders with modification timestamp within the last timedelta duration.
        :param only_mapped_sites: Whether to only include sites with extant Gravitate->KeyVu site ID mappings. If False
            and no mapping is available for a site, the Gravitate location name will be used as the KeyVu site ID.
        :param by_counterparties: Counterparties to include in the delivery plan. If empty, all counterparties are
            included. Intended to be set by the RITA config, but a fallback can be set here.
        """
        super().__init__(*args, **kwargs)
        self.time_back: timedelta = time_back
        self.order_nums = order_nums

        self.sd_client = sd_client
        self.rita_client = rita_client
        self.mongo_database = mongo_database

        self.keyvu_site_mappings = None  # To be filled in by execute() (can't load it here since Rita client is async)

        self.tcn_field_name = "source_system_id"
        # This defaults to true to prevent leaking all sites to SWTO if we don't have mappings
        self.only_mapped_sites = only_mapped_sites
        self.by_counterparties = by_counterparties

    def describe(self) -> str:
        return "Export recent order updates to KeyVu"

    async def execute(self, i: None = None) -> KeyVuDeliveryPlan:
        logger.warning("Delivery dates and times are not fully implemented for all order scenarios.")

        # Load additional configuration from Rita
        try:
            rita_config: GenericConfig = (await self.rita_client.get_config_by_name("/KeyVu", "KeyVu"))["KeyVu"]
            self.tcn_field_name = rita_config.config.get("extra_data_tcn_field", self.tcn_field_name)
            self.only_mapped_sites = rita_config.config.get("only_mapped_sites", self.only_mapped_sites)
            self.by_counterparties = rita_config.config.get("by_counterparties", self.by_counterparties)
        except Exception as e:
            logger.warning(
                f"Failed to load KeyVu config from RITA, using defaults"
            )
            logger.warning(f"Exception: {e}")

        logger.info(
            f"Configuration: extra_data_tcn_field: '{self.tcn_field_name}', "
            f"only_mapped_sites: {self.only_mapped_sites}, "
            f"by_counterparties: {self.by_counterparties}"
        )

        # Preload mappings
        maps = await self.rita_client.get_mappings_by_source_system("KeyVu")
        self.keyvu_site_mappings = {x.gravitate_id: x.source_id for x in maps}
        if self.only_mapped_sites and not self.keyvu_site_mappings:
            raise Exception("No KeyVu site mappings found, but only_mapped_sites is True")

        end_date = datetime.now(UTC).replace(microsecond=0)
        start_date = end_date - self.time_back
        logger.info(f"Downloading orders newer than {start_date}")
        if self.order_nums is not None:
            orders_raw = []
            for i, order_num in enumerate(self.order_nums):
                logger.info(f"Downloading order {order_num} ({i + 1}/{len(self.order_nums)})")
                order_resp = await self.sd_client.get_orders(order_number=order_num)
                order_resp.raise_for_status()
                orders_raw.extend(order_resp.json())
        else:
            order_resp = await self.sd_client.get_orders(last_change_date=start_date)
            orders_raw = order_resp.json()

        # Filter out orders supposedly changed in the future
        orders = list(filter(
            lambda o: datetime.fromisoformat(o.get("last_change_date")) < datetime.now() + timedelta(days=1),
            orders_raw
        ))

        # Using non-v1 API here
        logger.info("Getting BOL details")
        bol_resp = await self.sd_client.token_post(url="order/bols_and_drops", json={
            "order_ids": [order["order_id"] for order in orders]
        })
        bol_resp.raise_for_status()
        bols_raw = bol_resp.json()
        self.bol_lkp = {int(bol["order_number"]): bol for bol in bols_raw}

        logger.info("Getting order details from database")
        order_numbers = [order["order_number"] for order in orders]
        order_docs = await self.mongo_database["order_v2"].find({
            "number": {
                "$in": order_numbers
            }
        }).to_list()
        self.order_lkp = {o["number"]: o for o in order_docs}

        logger.info("Getting location details from API")
        loc_resp = await self.sd_client.all_locations()
        loc_resp.raise_for_status()
        locations_raw = loc_resp.json()
        self.loc_lkp = {loc["id"]: loc for loc in locations_raw}

        logger.info("Getting counterparty details from API")
        cp_resp = await self.sd_client.all_counterparties()
        cp_resp.raise_for_status()
        cp_raw = cp_resp.json()
        self.cp_lkp = {cp["id"]: cp for cp in cp_raw}

        logger.info("Getting stores from API")
        store_resp = await self.sd_client.all_stores(include_tanks=False)
        store_resp.raise_for_status()
        store_raw = store_resp.json()
        self.store_lkp = {store["store_number"]: store for store in store_raw}

        logger.info("Getting driver schedule details from database")
        ds_ids = list(filter(lambda x: x is not None,
                             [order.get("driver_schedule_id", None) for number, order in self.order_lkp.items()]))
        ds_ids = [ObjectId(id) for id in ds_ids]
        ds_docs = await self.mongo_database["driver_schedule"].find({
            "_id": {
                "$in": ds_ids
            }
        }).to_list()
        self.driver_sched_lkp = {str(ds["_id"]): ds for ds in ds_docs}

        logger.info("Building delivery plan models")
        export_date = datetime.now(UTC).replace(microsecond=0)
        deliveries = []
        for o in orders:
            try:
                deliveries.append(self.order_to_keyvu_delivery(o))
            except Exception as e:
                logger.error(f"Failed to build a delivery item for {o['order_number']}: {e}")
        dp = KeyVuDeliveryPlan(
            start_date=start_date,
            end_date=end_date,
            export_date=export_date,
            deliveries=deliveries
        )
        return dp

    def determine_delivery_status(self, order: dict, order_doc: dict, drop_index: int) -> DeliveryStatus:
        match order["order_state"]:
            case "accepted" | "assigned" | "open":
                return DeliveryStatus.planned
            case "canceled" | "deleted":
                return DeliveryStatus.canceled
            case "in progress":
                drop_doc: dict = order_doc["drops"][drop_index]
                route_status = drop_doc.get('route_status')
                # TODO: Implement better state tracking here - can we tell whether this particular delivery is being
                # loaded or not? Currently we'll just say it's planned.
                if route_status == "driving to drop":
                    return DeliveryStatus.on_route_loaded
                elif route_status == "arrived at drop":
                    return DeliveryStatus.unloading
                return DeliveryStatus.planned
            case "complete":
                return DeliveryStatus.delivered
            case _:
                raise Exception(f"Could not determine delivery status from order state '{order['order_state']}'")

    def get_delivery_date(self, order: dict, drop: dict, delivery_status: DeliveryStatus) -> datetime:
        # TODO: Get the correct delivery date for all order scenarios.
        delivery_date = None
        if delivery_status == DeliveryStatus.planned or delivery_status == DeliveryStatus.unloading:
            # Sometimes this is a str, sometimes it's a datetime
            delivery_date = drop["eta"]
        elif delivery_status == DeliveryStatus.canceled:
            # Is this acceptable?
            delivery_date = order.get("dispatch_window_end") or order.get("last_changed_date")
        elif delivery_status == DeliveryStatus.delivered:
            logger.debug("Delivered")
            bols = self.bol_lkp.get(order["order_number"], {}).get("bols")
            if not bols:
                logger.warning("No BOL on order")
            else:
                delivery_date = max([datetime.fromisoformat(b["date"]) for b in bols])
        if delivery_date is None:
            logger.warning("Unable to determine a delivery date - using current datetime")
            return datetime.now(UTC)

        if type(delivery_date) is str:
            delivery_date = datetime.fromisoformat(delivery_date)
        return delivery_date.astimezone(UTC).replace(microsecond=0)

    def drop_to_keyvu_station_delivery(self, order: dict, drop: dict, drop_index: int) -> StationDelivery | None:
        # Confirm if the counterparty is included
        if self.by_counterparties:
            location = self.loc_lkp[drop["location_id"]]
            store_cp_name = self.store_lkp[location["name"]][
                "counterparty_name"]  # location name seems to = store_number
            if store_cp_name not in self.by_counterparties:
                logger.debug(
                    f"Skipping order {order['order_number']} drop #{drop_index}, "
                    f"{store_cp_name} not in counterparties list"
                )
                return None
        delivery_status = self.determine_delivery_status(order, self.order_lkp[order["order_number"]], drop_index)

        # Map the site IDs, if available
        if self.keyvu_site_mappings:
            site_id = self.keyvu_site_mappings.get(drop["location_name"])
            # If the lookup fails...
            if not site_id:
                # ...and we are skipping unmapped sites, skip this one
                if self.only_mapped_sites:
                    logger.warning(
                        f"Skipping {order['order_number']} drop #{drop_index}, could not find site_id in mappings")
                    return None
                # otherwise we can use the location name as a fallback
                else:
                    site_id = drop["location_name"]
        # But if not, use location name directly
        else:
            site_id = drop["location_name"]

        def bol_correlates(bol: dict, tanks: list[int]) -> bool:
            return bol["location_id"] == drop["location_id"] and bol["store_tank"] in tanks

        allocated_bols = self.order_lkp[order["order_number"]].get("allocated_bols", [])
        # allocated_bols will have multiple entries - one per product - if a load is split.
        # Since KeyVu doesn't have any product details, just supplier and terminal, this results in apparent duplicates
        # when it gets converted to their format.
        # Convert all BOLs, but only keep them if we haven't already generated an identical BOL entry.
        converted_bols = []
        for bol in allocated_bols:
            converted = self.allocated_bol_to_keyvu_bol(bol)
            drop_tanks = [x['tank_id'] for x in drop["details"]]
            if converted not in converted_bols and bol_correlates(bol, drop_tanks):
                converted_bols.append(converted)
        return StationDelivery(
            delivery_status=delivery_status,
            site_id=site_id,
            details=[StationDeliveryDetails.from_v1_order_dict(x) for x in drop["details"]],
            delivery_date=self.get_delivery_date(order, drop, delivery_status),
            bill_of_ladings=converted_bols
        )

    def allocated_bol_to_keyvu_bol(self, allocated_bol: dict) -> StationDeliveryBOL:
        return StationDeliveryBOL(
            supplier=allocated_bol["bol_supplier"],
            terminal_name=allocated_bol["bol_terminal"],
            bill_of_lading_number=allocated_bol["bol_number"],
            terminal_control_number=self.loc_lkp.get(
                allocated_bol["bol_terminal_id"], {}).get("extra_data", {}).get(self.tcn_field_name, ""),
            consignee=""  # KeyVu says if we don't have a consignee ID ("usually some 5 digit number") to leave it empty
        )

    def all_drops_to_keyvu_station_deliveries(self, order: dict) -> list[StationDelivery]:
        station_deliveries = [
            self.drop_to_keyvu_station_delivery(order, drop, index)
            for index, drop in enumerate(order["drops"])
        ]
        # Filter out failed conversions (typically failed site lookups)
        return [d for d in station_deliveries if d is not None]

    def order_to_keyvu_delivery(self, order: dict) -> Delivery:
        updated_at: datetime = datetime.fromisoformat(max(filter(
            lambda x: x is not None,
            [order["last_change_date"], order.get("hauled_by_updated")])
        ))
        order_detail = self.order_lkp.get(order["order_number"], {})
        unit = ""
        if dsid := order_detail.get("driver_schedule_id"):
            driver_log = self.driver_sched_lkp.get(dsid, {}).get("driver_log", {})
            tractor = driver_log.get("tractor")
            trailer = driver_log.get("trailer")
            unit += tractor if tractor else ""
            if unit and trailer:
                unit += f"-{trailer}"
            elif trailer:
                unit = trailer

        station_deliveries = self.all_drops_to_keyvu_station_deliveries(order)
        if not station_deliveries:
            raise Exception(f"No station deliveries built for order {order['order_number']}, skipping order")
        return Delivery(
            id=str(order["order_number"]),
            carrier_name=order["supply_option"].get("carrier"),
            # Grab the SCAC from the carrier counterparty, falling back to None (which becomes blank) if not found.
            scac=self.cp_lkp.get(order_detail.get("carrier_id"), {}).get("scac"),
            # GeoLocation must be included but we don't necessarily have any data to fill in (this would be driver
            # breadcrumbs).
            # This prevents geolocation from getting ignored entirely during serialization, because the schema expects
            # it to be there, but doesn't add any sub-elements, which are optional.
            geo_location=GeoLocation(longitude=None, latitude=None, heading=None, last_updated=None),
            unit=unit,
            last_updated=updated_at.astimezone(UTC).replace(microsecond=0),
            station_deliveries=station_deliveries
        )


if __name__ == "__main__":
    import asyncio


    async def main():
        s = KeyVuExportStep(
            rita_client=GravitateRitaAPI(
                base_url="",
                username="",
                password="",
            ),
            sd_client=GravitateSDAPI(
                base_url="",
                username="",
                password="",
            ),
            mongo_database=AsyncMongoClient("mongo conn str")["mongo db name"],
            time_back=timedelta(minutes=60),
            order_nums=[]
        )
        plan = await s.execute()
        dp_string = plan.to_xml(**default_serialization_options)
        plan_file = RawData(
            data=dp_string,
            file_name=f"plan_file{datetime.now().isoformat()}.xml"
        )
        with open(plan_file.file_name, "wb") as f:
            f.write(plan_file.data)


    asyncio.run(main())
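
As a quick reference for the status logic in the hunk above, here is a minimal sketch of how determine_delivery_status resolves an in-progress order; the order and order_doc dicts are hypothetical stand-ins, not values from the package:

# Illustration only: hypothetical inputs shaped like the dicts the step reads.
order = {"order_state": "in progress", "order_number": 12345}
order_doc = {"drops": [{"route_status": "driving to drop"}]}

# step.determine_delivery_status(order, order_doc, drop_index=0) would return:
#   route_status "driving to drop" -> DeliveryStatus.on_route_loaded
#   route_status "arrived at drop" -> DeliveryStatus.unloading
#   any other route_status         -> DeliveryStatus.planned (per the TODO in the method)
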
@@ -0,0 +1,238 @@
import json
from datetime import datetime, timedelta, UTC
from functools import lru_cache
from itertools import groupby
from typing import Dict, Any, List, Tuple, Optional

import pytz
from dateutil.parser import parse
from more_itertools.more import chunked

from bb_integrations_lib.gravitate.pe_api import GravitatePEAPI
from bb_integrations_lib.gravitate.rita_api import GravitateRitaAPI
from bb_integrations_lib.mappers.prices.model import PricePublisher, PricingIntegrationConfig
from bb_integrations_lib.models.pipeline_structs import StopPipeline
from bb_integrations_lib.models.rita.config import GenericConfig
from bb_integrations_lib.protocols.pipelines import Step, ParserBase
from bb_integrations_lib.shared.model import PEPriceData


class ImpossibleToParseDate(Exception):
    pass


class PEPriceExportStep(Step):
    def __init__(self,
                 rita_client: GravitateRitaAPI,
                 pe_client: GravitatePEAPI,
                 price_publishers: list[PricePublisher],
                 parser: type[ParserBase] | None = None,
                 parser_kwargs: dict | None = None,
                 config_id: str | None = None,
                 hours_back: int = 24,
                 addl_endpoint_args: dict | None = None,
                 last_sync_date: datetime | None = datetime.now(UTC),
                 *args, **kwargs):
        """This step requires:
        - tenant_name: [REQUIRED] client name. (i.e Jacksons, TTE, Coleman)
        - price_publishers: [REQUIRED] a list of price publisher names to be included in the price request
        - config_id: [OPTIONAL] a RITA config to pull last sync date
        - hours_back: [OPTIONAL] hours back from last sync date, defaults to 12
        - mode: [OPTIONAL] can be 'production' or 'development', defaults to production
        """
        super().__init__(*args, **kwargs)
        self.pe_client = pe_client
        self.price_publishers = price_publishers
        self.config_id = config_id
        self.hours_back = hours_back
        self.rita_client = rita_client
        self.additional_endpoint_arguments = addl_endpoint_args or {}
        self.last_sync_date = last_sync_date
        if parser:
            self.custom_parser = parser
            self.custom_parser_kwargs = parser_kwargs or {}

    def price_publisher_lkp(self) -> Dict[str, PricePublisher]:
        lkp = {}
        pp = self.price_publishers
        for p in pp:
            lkp[p.name] = p
        return lkp

    def get_publisher_extend_by(self, key: str) -> int | None:
        lkp = self.price_publisher_lkp()
        return lkp[key].extend_by_days

    def get_publisher_price_type(self, key: str) -> str:
        lkp = self.price_publisher_lkp()
        return lkp[key].price_type

    def price_type_rows(self, rows: List[PEPriceData]) -> List[PEPriceData]:
        for row in rows:
            price_type = self.get_publisher_price_type(row.PricePublisher)
            row.PriceType = price_type
        return rows

    def describe(self) -> str:
        return f"Export Pricing Engine Prices"

    async def execute(self, _: Any = None) -> List[PEPriceData] | List[Dict]:
        updated_prices = await self.get_updated_prices_for_publishers(last_sync_date=self.last_sync_date,
                                                                      price_publishers=self.price_publishers)
        if not updated_prices:
            raise StopPipeline
        updated_price_instrument_ids, min_updated_date = PEPriceExportStep.instrument_ids_and_min_date(updated_prices)
        historic_prices_per_instrument_id = await self.get_updated_prices_for_instruments(
            min_effective_date=min_updated_date,
            price_publishers=self.price_publishers,
            instrument_ids=updated_price_instrument_ids)
        prices = self.update_historical_prices(historic_prices_per_instrument_id)
        if not hasattr(self, "custom_parser"):
            return prices
        else:
            parser = self.custom_parser(**self.custom_parser_kwargs)
            parser_results = await parser.parse(prices)
            return parser_results

    def update_historical_prices(self, rows: List[PEPriceData]) -> List[PEPriceData]:
        _sorted_id = sorted(rows, key=lambda r: (r.PriceInstrumentId, r.EffectiveFromDateTime), reverse=True)
        for instrument_id, group in groupby(_sorted_id, key=lambda r: r.PriceInstrumentId):
            group_list = PEPriceExportStep.rank_rows(list(group))
            group_list_price_typed = self.price_type_rows(group_list)
            max_row = max(group_list_price_typed, key=lambda r: r.EffectiveFromDateTime)
            max_row.ExtendByDays = self.get_publisher_extend_by(max_row.PricePublisher)  # makes it more configurable
            max_row.IsLatest = True
        return _sorted_id

    async def get_prices(
            self,
            query: Dict,
            count: int = 1000,
            include_source_data: bool = True
    ) -> List[PEPriceData]:
        records = []
        payload = {
            "Query": {**query,
                      "COUNT": count
                      },
            "includeSourceData": include_source_data
        }
        resp = await self.pe_client.get_prices(payload)
        while len(resp['Data']) > 0:
            records.extend(resp['Data'])
            max_sync = resp["MaxSyncResult"]
            if max_sync is None:
                break
            payload["Query"]["MaxSync"] = max_sync
            resp = await self.pe_client.get_prices(payload)
        self.pipeline_context.included_files["Pricing Engine Export Prices Step"] = json.dumps(records)
        return [PEPriceData.model_validate(price) for price in records]

    async def get_updated_prices_for_publishers(self,
                                                last_sync_date: datetime,
                                                price_publishers: List[PricePublisher] = None) -> List[PEPriceData]:
        last_sync_date = (last_sync_date - timedelta(hours=self.hours_back)).replace(tzinfo=pytz.UTC)
        payload = {
            "IsActiveFilterType": "ActiveOnly",
            "PricePublisherNames": [p.name for p in (price_publishers or [])],
            "MaxSync": {
                "MaxSyncDateTime": last_sync_date.isoformat(),
                "MaxSyncPkId": 0
            },
            **self.additional_endpoint_arguments,
        }
        rows = await self.get_prices(query=payload, include_source_data=True)
        return rows

    async def get_updated_prices_for_instruments(self,
                                                 min_effective_date: datetime,
                                                 instrument_ids: List[int],
                                                 price_publishers: List[PricePublisher] = None) -> List[PEPriceData]:
        res_rows = []
        for idx, group in enumerate(chunked(instrument_ids, 50)):
            payload = {
                "IsActiveFilterType": "ActiveOnly",
                "PricePublisherNames": [p.name for p in (price_publishers or [])],
                "MinEffectiveDate": min_effective_date.isoformat(),
                "PriceInstrumentIds": group
            }
            rows = await self.get_prices(query=payload, include_source_data=True)
            res_rows.extend(rows)
        return res_rows

    @staticmethod
    def instrument_ids_and_min_date(rows: List[PEPriceData]) -> Tuple[list, datetime]:
        unique_price_instrument_ids = list(set([r.PriceInstrumentId for r in rows]))
        min_date = min([PEPriceExportStep.try_to_parse_date(r.EffectiveFromDateTime) for r in rows])
        return unique_price_instrument_ids, min_date

    @staticmethod
    def rank_rows(rows: List[PEPriceData]) -> List[PEPriceData]:
        for idx, row in enumerate(rows):
            row.Rank = idx + 1
        return rows

    @staticmethod
    def try_to_parse_date(dt_string: str) -> datetime:
        if isinstance(dt_string, str):
            try:
                parsed_datetime = parse(dt_string)
                return parsed_datetime
            except (ValueError, TypeError):
                raise ImpossibleToParseDate(f"Could not parse date: {dt_string}")
        elif isinstance(dt_string, datetime):
            return dt_string
        else:
            raise ImpossibleToParseDate(f"Could not parse date: {dt_string} -> Format not supported")

    @staticmethod
    def check_if_date_bigger_equal_previous_weekday(date: str) -> bool:
        parsed = parse(date).replace(tzinfo=pytz.UTC)
        _weekday = PEPriceExportStep.previous_weekday()
        return parsed >= _weekday

    @staticmethod
    @lru_cache(maxsize=1)
    def previous_weekday(anchor: Optional[datetime] = None) -> datetime:
        if anchor is None:
            anchor = datetime.now(UTC)
        anchor = anchor.replace(hour=0, minute=0, second=0, microsecond=0)
        current_weekday = anchor.weekday()
        if current_weekday == 0:  # Monday
            days_back = 3  # Go back to Friday
        elif current_weekday == 6:  # Sunday
            days_back = 2  # Go back to Friday
        else:  # Tuesday through Saturday
            days_back = 1  # Go back one day
        return anchor - timedelta(days=days_back)


async def load_config(rita_client: GravitateRitaAPI, environment: str) -> Tuple[PricingIntegrationConfig, str]:
    config_name = f"{environment} Pricing Engine Contract Integration"
    configs = await rita_client.get_config_by_name(bucket_path="/Prices", config_name=config_name)
    job_config: GenericConfig = configs[config_name]
    pipeline_config: PricingIntegrationConfig = PricingIntegrationConfig.model_validate(job_config.config)
    return pipeline_config, job_config.config_id


if __name__ == "__main__":
    async def main():
        rita_client = GravitateRitaAPI(
            base_url="",
            client_id="",
            client_secret=""
        )
        config, config_id = await load_config(environment="Loves", rita_client=rita_client)
        s = PEPriceExportStep(
            rita_client=rita_client,
            pe_client=GravitatePEAPI(
                base_url="",
                username="",
                password=""
            ),
            price_publishers=config.price_publishers,
            config_id=config_id,
            hours_back=24,
        )
        await s.execute()
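
For the weekday arithmetic in previous_weekday above, a small worked example; the dates are hypothetical and chosen only to show the Monday and Sunday special cases:

# Illustration only: previous_weekday() returns midnight (UTC) of the prior business day.
# Monday    2024-06-10 -> back 3 days -> Friday  2024-06-07 00:00 UTC
# Sunday    2024-06-09 -> back 2 days -> Friday  2024-06-07 00:00 UTC
# Wednesday 2024-06-12 -> back 1 day  -> Tuesday 2024-06-11 00:00 UTC
# Note: because of @lru_cache(maxsize=1), the no-argument call is computed once and
# reused on later calls unless a call with a different anchor evicts it.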