bb-integrations-library 3.0.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bb_integrations_lib/__init__.py +0 -0
- bb_integrations_lib/converters/__init__.py +0 -0
- bb_integrations_lib/gravitate/__init__.py +0 -0
- bb_integrations_lib/gravitate/base_api.py +20 -0
- bb_integrations_lib/gravitate/model.py +29 -0
- bb_integrations_lib/gravitate/pe_api.py +122 -0
- bb_integrations_lib/gravitate/rita_api.py +552 -0
- bb_integrations_lib/gravitate/sd_api.py +572 -0
- bb_integrations_lib/gravitate/testing/TTE/sd/models.py +1398 -0
- bb_integrations_lib/gravitate/testing/TTE/sd/tests/test_models.py +2987 -0
- bb_integrations_lib/gravitate/testing/__init__.py +0 -0
- bb_integrations_lib/gravitate/testing/builder.py +55 -0
- bb_integrations_lib/gravitate/testing/openapi.py +70 -0
- bb_integrations_lib/gravitate/testing/util.py +274 -0
- bb_integrations_lib/mappers/__init__.py +0 -0
- bb_integrations_lib/mappers/prices/__init__.py +0 -0
- bb_integrations_lib/mappers/prices/model.py +106 -0
- bb_integrations_lib/mappers/prices/price_mapper.py +127 -0
- bb_integrations_lib/mappers/prices/protocol.py +20 -0
- bb_integrations_lib/mappers/prices/util.py +61 -0
- bb_integrations_lib/mappers/rita_mapper.py +523 -0
- bb_integrations_lib/models/__init__.py +0 -0
- bb_integrations_lib/models/dtn_supplier_invoice.py +487 -0
- bb_integrations_lib/models/enums.py +28 -0
- bb_integrations_lib/models/pipeline_structs.py +76 -0
- bb_integrations_lib/models/probe/probe_event.py +20 -0
- bb_integrations_lib/models/probe/request_data.py +431 -0
- bb_integrations_lib/models/probe/resume_token.py +7 -0
- bb_integrations_lib/models/rita/audit.py +113 -0
- bb_integrations_lib/models/rita/auth.py +30 -0
- bb_integrations_lib/models/rita/bucket.py +17 -0
- bb_integrations_lib/models/rita/config.py +188 -0
- bb_integrations_lib/models/rita/constants.py +19 -0
- bb_integrations_lib/models/rita/crossroads_entities.py +293 -0
- bb_integrations_lib/models/rita/crossroads_mapping.py +428 -0
- bb_integrations_lib/models/rita/crossroads_monitoring.py +78 -0
- bb_integrations_lib/models/rita/crossroads_network.py +41 -0
- bb_integrations_lib/models/rita/crossroads_rules.py +80 -0
- bb_integrations_lib/models/rita/email.py +39 -0
- bb_integrations_lib/models/rita/issue.py +63 -0
- bb_integrations_lib/models/rita/mapping.py +227 -0
- bb_integrations_lib/models/rita/probe.py +58 -0
- bb_integrations_lib/models/rita/reference_data.py +110 -0
- bb_integrations_lib/models/rita/source_system.py +9 -0
- bb_integrations_lib/models/rita/workers.py +76 -0
- bb_integrations_lib/models/sd/bols_and_drops.py +241 -0
- bb_integrations_lib/models/sd/get_order.py +301 -0
- bb_integrations_lib/models/sd/orders.py +18 -0
- bb_integrations_lib/models/sd_api.py +115 -0
- bb_integrations_lib/pipelines/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/order_by_site_product_parser.py +50 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/tank_configs_parser.py +47 -0
- bb_integrations_lib/pipelines/parsers/dtn/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/dtn/dtn_price_parser.py +102 -0
- bb_integrations_lib/pipelines/parsers/dtn/model.py +79 -0
- bb_integrations_lib/pipelines/parsers/price_engine/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/price_engine/parse_accessorials_prices_parser.py +67 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_merge_parser.py +111 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_sync_parser.py +107 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/shared.py +81 -0
- bb_integrations_lib/pipelines/parsers/tank_reading_parser.py +155 -0
- bb_integrations_lib/pipelines/parsers/tank_sales_parser.py +144 -0
- bb_integrations_lib/pipelines/shared/__init__.py +0 -0
- bb_integrations_lib/pipelines/shared/allocation_matching.py +227 -0
- bb_integrations_lib/pipelines/shared/bol_allocation.py +2793 -0
- bb_integrations_lib/pipelines/steps/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/create_accessorials_step.py +80 -0
- bb_integrations_lib/pipelines/steps/distribution_report/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/distribution_report/distribution_report_datafram_to_raw_data.py +33 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_model_history_step.py +50 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_order_by_site_product_step.py +62 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_tank_configs_step.py +40 -0
- bb_integrations_lib/pipelines/steps/distribution_report/join_distribution_order_dos_step.py +85 -0
- bb_integrations_lib/pipelines/steps/distribution_report/upload_distribution_report_datafram_to_big_query.py +47 -0
- bb_integrations_lib/pipelines/steps/echo_step.py +14 -0
- bb_integrations_lib/pipelines/steps/export_dataframe_to_rawdata_step.py +28 -0
- bb_integrations_lib/pipelines/steps/exporting/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/exporting/bbd_export_payroll_file_step.py +107 -0
- bb_integrations_lib/pipelines/steps/exporting/bbd_export_readings_step.py +236 -0
- bb_integrations_lib/pipelines/steps/exporting/cargas_wholesale_bundle_upload_step.py +33 -0
- bb_integrations_lib/pipelines/steps/exporting/dataframe_flat_file_export.py +29 -0
- bb_integrations_lib/pipelines/steps/exporting/gcs_bucket_export_file_step.py +34 -0
- bb_integrations_lib/pipelines/steps/exporting/keyvu_export_step.py +356 -0
- bb_integrations_lib/pipelines/steps/exporting/pe_price_export_step.py +238 -0
- bb_integrations_lib/pipelines/steps/exporting/platform_science_order_sync_step.py +500 -0
- bb_integrations_lib/pipelines/steps/exporting/save_rawdata_to_disk.py +15 -0
- bb_integrations_lib/pipelines/steps/exporting/sftp_export_file_step.py +60 -0
- bb_integrations_lib/pipelines/steps/exporting/sftp_export_many_files_step.py +23 -0
- bb_integrations_lib/pipelines/steps/exporting/update_exported_orders_table_step.py +64 -0
- bb_integrations_lib/pipelines/steps/filter_step.py +22 -0
- bb_integrations_lib/pipelines/steps/get_latest_sync_date.py +34 -0
- bb_integrations_lib/pipelines/steps/importing/bbd_import_payroll_step.py +30 -0
- bb_integrations_lib/pipelines/steps/importing/get_order_numbers_to_export_step.py +138 -0
- bb_integrations_lib/pipelines/steps/importing/load_file_to_dataframe_step.py +46 -0
- bb_integrations_lib/pipelines/steps/importing/load_imap_attachment_step.py +172 -0
- bb_integrations_lib/pipelines/steps/importing/pe_bulk_sync_price_structure_step.py +68 -0
- bb_integrations_lib/pipelines/steps/importing/pe_price_merge_step.py +86 -0
- bb_integrations_lib/pipelines/steps/importing/sftp_file_config_step.py +124 -0
- bb_integrations_lib/pipelines/steps/importing/test_exact_file_match.py +57 -0
- bb_integrations_lib/pipelines/steps/null_step.py +15 -0
- bb_integrations_lib/pipelines/steps/pe_integration_job_step.py +32 -0
- bb_integrations_lib/pipelines/steps/processing/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/processing/archive_gcs_step.py +76 -0
- bb_integrations_lib/pipelines/steps/processing/archive_sftp_step.py +48 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_format_tank_readings_step.py +492 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_prices_step.py +54 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_tank_sales_step.py +124 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_tankreading_step.py +80 -0
- bb_integrations_lib/pipelines/steps/processing/convert_bbd_order_to_cargas_step.py +226 -0
- bb_integrations_lib/pipelines/steps/processing/delete_sftp_step.py +33 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/__init__.py +2 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/convert_dtn_invoice_to_sd_model.py +145 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/parse_dtn_invoice_step.py +38 -0
- bb_integrations_lib/pipelines/steps/processing/file_config_parser_step.py +720 -0
- bb_integrations_lib/pipelines/steps/processing/file_config_parser_step_v2.py +418 -0
- bb_integrations_lib/pipelines/steps/processing/get_sd_price_price_request.py +105 -0
- bb_integrations_lib/pipelines/steps/processing/keyvu_upload_deliveryplan_step.py +39 -0
- bb_integrations_lib/pipelines/steps/processing/mark_orders_exported_in_bbd_step.py +185 -0
- bb_integrations_lib/pipelines/steps/processing/pe_price_rows_processing_step.py +174 -0
- bb_integrations_lib/pipelines/steps/processing/send_process_report_step.py +47 -0
- bb_integrations_lib/pipelines/steps/processing/sftp_renamer_step.py +61 -0
- bb_integrations_lib/pipelines/steps/processing/tank_reading_touchup_steps.py +75 -0
- bb_integrations_lib/pipelines/steps/processing/upload_supplier_invoice_step.py +16 -0
- bb_integrations_lib/pipelines/steps/send_attached_in_rita_email_step.py +44 -0
- bb_integrations_lib/pipelines/steps/send_rita_email_step.py +34 -0
- bb_integrations_lib/pipelines/steps/sleep_step.py +24 -0
- bb_integrations_lib/pipelines/wrappers/__init__.py +0 -0
- bb_integrations_lib/pipelines/wrappers/accessorials_transformation.py +104 -0
- bb_integrations_lib/pipelines/wrappers/distribution_report.py +191 -0
- bb_integrations_lib/pipelines/wrappers/export_tank_readings.py +237 -0
- bb_integrations_lib/pipelines/wrappers/import_tank_readings.py +192 -0
- bb_integrations_lib/pipelines/wrappers/wrapper.py +81 -0
- bb_integrations_lib/protocols/__init__.py +0 -0
- bb_integrations_lib/protocols/flat_file.py +210 -0
- bb_integrations_lib/protocols/gravitate_client.py +104 -0
- bb_integrations_lib/protocols/pipelines.py +697 -0
- bb_integrations_lib/provider/__init__.py +0 -0
- bb_integrations_lib/provider/api/__init__.py +0 -0
- bb_integrations_lib/provider/api/cargas/__init__.py +0 -0
- bb_integrations_lib/provider/api/cargas/client.py +43 -0
- bb_integrations_lib/provider/api/cargas/model.py +49 -0
- bb_integrations_lib/provider/api/cargas/protocol.py +23 -0
- bb_integrations_lib/provider/api/dtn/__init__.py +0 -0
- bb_integrations_lib/provider/api/dtn/client.py +128 -0
- bb_integrations_lib/provider/api/dtn/protocol.py +9 -0
- bb_integrations_lib/provider/api/keyvu/__init__.py +0 -0
- bb_integrations_lib/provider/api/keyvu/client.py +30 -0
- bb_integrations_lib/provider/api/keyvu/model.py +149 -0
- bb_integrations_lib/provider/api/macropoint/__init__.py +0 -0
- bb_integrations_lib/provider/api/macropoint/client.py +28 -0
- bb_integrations_lib/provider/api/macropoint/model.py +40 -0
- bb_integrations_lib/provider/api/pc_miler/__init__.py +0 -0
- bb_integrations_lib/provider/api/pc_miler/client.py +130 -0
- bb_integrations_lib/provider/api/pc_miler/model.py +6 -0
- bb_integrations_lib/provider/api/pc_miler/web_services_apis.py +131 -0
- bb_integrations_lib/provider/api/platform_science/__init__.py +0 -0
- bb_integrations_lib/provider/api/platform_science/client.py +147 -0
- bb_integrations_lib/provider/api/platform_science/model.py +82 -0
- bb_integrations_lib/provider/api/quicktrip/__init__.py +0 -0
- bb_integrations_lib/provider/api/quicktrip/client.py +52 -0
- bb_integrations_lib/provider/api/telapoint/__init__.py +0 -0
- bb_integrations_lib/provider/api/telapoint/client.py +68 -0
- bb_integrations_lib/provider/api/telapoint/model.py +178 -0
- bb_integrations_lib/provider/api/warren_rogers/__init__.py +0 -0
- bb_integrations_lib/provider/api/warren_rogers/client.py +207 -0
- bb_integrations_lib/provider/aws/__init__.py +0 -0
- bb_integrations_lib/provider/aws/s3/__init__.py +0 -0
- bb_integrations_lib/provider/aws/s3/client.py +126 -0
- bb_integrations_lib/provider/ftp/__init__.py +0 -0
- bb_integrations_lib/provider/ftp/client.py +140 -0
- bb_integrations_lib/provider/ftp/interface.py +273 -0
- bb_integrations_lib/provider/ftp/model.py +76 -0
- bb_integrations_lib/provider/imap/__init__.py +0 -0
- bb_integrations_lib/provider/imap/client.py +228 -0
- bb_integrations_lib/provider/imap/model.py +3 -0
- bb_integrations_lib/provider/sqlserver/__init__.py +0 -0
- bb_integrations_lib/provider/sqlserver/client.py +106 -0
- bb_integrations_lib/secrets/__init__.py +4 -0
- bb_integrations_lib/secrets/adapters.py +98 -0
- bb_integrations_lib/secrets/credential_models.py +222 -0
- bb_integrations_lib/secrets/factory.py +85 -0
- bb_integrations_lib/secrets/providers.py +160 -0
- bb_integrations_lib/shared/__init__.py +0 -0
- bb_integrations_lib/shared/exceptions.py +25 -0
- bb_integrations_lib/shared/model.py +1039 -0
- bb_integrations_lib/shared/shared_enums.py +510 -0
- bb_integrations_lib/storage/README.md +236 -0
- bb_integrations_lib/storage/__init__.py +0 -0
- bb_integrations_lib/storage/aws/__init__.py +0 -0
- bb_integrations_lib/storage/aws/s3.py +8 -0
- bb_integrations_lib/storage/defaults.py +72 -0
- bb_integrations_lib/storage/gcs/__init__.py +0 -0
- bb_integrations_lib/storage/gcs/client.py +8 -0
- bb_integrations_lib/storage/gcsmanager/__init__.py +0 -0
- bb_integrations_lib/storage/gcsmanager/client.py +8 -0
- bb_integrations_lib/storage/setup.py +29 -0
- bb_integrations_lib/util/__init__.py +0 -0
- bb_integrations_lib/util/cache/__init__.py +0 -0
- bb_integrations_lib/util/cache/custom_ttl_cache.py +75 -0
- bb_integrations_lib/util/cache/protocol.py +9 -0
- bb_integrations_lib/util/config/__init__.py +0 -0
- bb_integrations_lib/util/config/manager.py +391 -0
- bb_integrations_lib/util/config/model.py +41 -0
- bb_integrations_lib/util/exception_logger/__init__.py +0 -0
- bb_integrations_lib/util/exception_logger/exception_logger.py +146 -0
- bb_integrations_lib/util/exception_logger/test.py +114 -0
- bb_integrations_lib/util/utils.py +364 -0
- bb_integrations_lib/workers/__init__.py +0 -0
- bb_integrations_lib/workers/groups.py +13 -0
- bb_integrations_lib/workers/rpc_worker.py +50 -0
- bb_integrations_lib/workers/topics.py +20 -0
- bb_integrations_library-3.0.11.dist-info/METADATA +59 -0
- bb_integrations_library-3.0.11.dist-info/RECORD +217 -0
- bb_integrations_library-3.0.11.dist-info/WHEEL +4 -0
|
File without changes
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
import json
|
|
2
|
+
from time import sleep
|
|
3
|
+
from typing import Dict, List, cast
|
|
4
|
+
|
|
5
|
+
from loguru import logger
|
|
6
|
+
|
|
7
|
+
from bb_integrations_lib.gravitate.sd_api import GravitateSDAPI
|
|
8
|
+
from bb_integrations_lib.models.pipeline_structs import BBDUploadResult
|
|
9
|
+
from bb_integrations_lib.protocols.pipelines import Step
|
|
10
|
+
from bb_integrations_lib.util.utils import CustomJSONEncoder
|
|
11
|
+
from bb_integrations_lib.util.config.manager import GlobalConfigManager
|
|
12
|
+
from bb_integrations_lib.util.config.model import GlobalConfig
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class BBDUploadAccessorialsStep(Step[List, BBDUploadResult, None]):
    """Upload accessorial rate records to BBD one record at a time.

    Configuration keys:
        mode: "production" (default); any other value selects the "test"
            environment (only relevant with ``tenant_name``).
        buffer: seconds to sleep between uploads (default 0.5) to throttle
            the API.
        chuk_size: default 1000. NOTE(review): the key looks like a typo for
            "chunk_size" and the value is never read in this step, but the
            key is kept for backward compatibility with existing configs.
        env / bbd_username / bbd_password: explicit SD API credentials, OR
        tenant_name: tenant whose credentials are resolved through
            GlobalConfigManager.

    Raises:
        Exception: if neither "env" nor "tenant_name" is configured.
    """

    def __init__(self, step_configuration: Dict[str, str]):
        super().__init__(step_configuration)
        self.config_manager = GlobalConfigManager()
        self.env_mode = step_configuration.get('mode', "production")
        self.buffer = step_configuration.get('buffer', 0.5)
        self.chuk_size = step_configuration.get('chuk_size', 1000)
        if 'env' in step_configuration:
            # Explicit credentials take precedence over tenant-based lookup.
            self.bbd_client = GravitateSDAPI.from_config(step_configuration["env"],
                                                         step_configuration["bbd_username"],
                                                         step_configuration["bbd_password"])
            self.bbd_client.username = step_configuration["bbd_username"]
            self.bbd_client.password = step_configuration["bbd_password"]
        elif 'tenant_name' in step_configuration:
            self.secret_data: GlobalConfig = self.config_manager.get_environment(step_configuration["tenant_name"])
            # "production" mode uses the production SD environment; anything
            # else falls back to the tenant's "test" environment.
            env_name = "production" if self.env_mode == 'production' else "test"
            if env_name == "test":
                logger.debug("Initializing API in dev mode")
            self.bbd_client = cast(
                GravitateSDAPI,
                self.config_manager.environment_from_name(step_configuration["tenant_name"],
                                                          env_name,
                                                          sd_basic_auth=True).sd.api_client)
        else:
            raise Exception("env or tenant is required")

    def describe(self) -> str:
        return "Upload Accessorials to BBD"

    async def execute(self, accessorials: List[Dict]) -> BBDUploadResult:
        """Upload each accessorial record, collecting per-record results.

        Records that fail are logged into the pipeline context and skipped;
        the step only raises if something outside the per-record loop fails.

        Returns:
            BBDUploadResult with success/failure counts and the succeeded
            records.
        """
        logs = {"requests": [], "responses": [], "errors": []}
        try:
            total_accessorials = len(accessorials)
            succeeded = []
            failed_items = []

            for idx, accessorial in enumerate(accessorials):
                logs["requests"].append(accessorial)
                try:
                    resp = await self.bbd_client.call_ep("freight/accessorial/automatic/rate/create", json=accessorial)
                    resp.raise_for_status()
                    _json = resp.json()
                    sleep(self.buffer)  # throttle so the API isn't hammered
                    succeeded.append(accessorial)
                    logger.info(f"Accessorials uploaded successfully: {idx + 1} of {total_accessorials}")
                    logs["responses"].append({"response": _json, "request": accessorial})
                except Exception as e:
                    # Bug fix: the original accessed e.response.content
                    # unconditionally; for exceptions without a .response
                    # attribute that raised AttributeError, masking the real
                    # error and aborting the whole upload loop.
                    response = getattr(e, "response", None)
                    detail = getattr(response, "content", "")
                    logs["errors"].append({"record": accessorial,
                                           "error": f"Error uploading accessorials: {str(e)} {detail}"})
                    failed_items.append(accessorial)
                    continue

            self.pipeline_context.included_files["accessorials data upload"] = json.dumps(logs, cls=CustomJSONEncoder)
            return BBDUploadResult(
                succeeded=len(succeeded),
                failed=len(failed_items),
                succeeded_items=succeeded
            )

        except Exception as e:
            logger.exception(f"Unable to upload | {e}")
            raise e
|
|
File without changes
|
bb_integrations_lib/pipelines/steps/distribution_report/distribution_report_datafram_to_raw_data.py
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
from datetime import datetime, UTC
|
|
2
|
+
from io import BytesIO
|
|
3
|
+
from typing import Tuple
|
|
4
|
+
|
|
5
|
+
import pandas as pd
|
|
6
|
+
|
|
7
|
+
from bb_integrations_lib.protocols.pipelines import Step
|
|
8
|
+
from bb_integrations_lib.shared.model import RawData
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class DistributionReportDfToRawData(Step):
    """Serialize the (summary, detail) distribution-report dataframes into a
    single two-sheet Excel workbook wrapped in a RawData payload."""

    def __init__(self, file_base_name: str, file_name_date_format: str, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.file_base_name = file_base_name
        self.file_name_date_format = file_name_date_format

    def describe(self) -> str:
        return "Distribution Report Dataframe to File"

    @property
    def file_name(self) -> str:
        # A UTC timestamp is baked into the name so repeated runs don't collide.
        timestamp = datetime.now(UTC).strftime(self.file_name_date_format)
        return f"{self.file_base_name}_{timestamp}.xlsx"

    async def execute(self, data: Tuple[pd.DataFrame, pd.DataFrame]) -> RawData:
        """Write the two frames to "Summary" and "Details" sheets and return
        the workbook bytes as RawData."""
        summary_df, detail_df = data
        workbook_buffer = BytesIO()
        with pd.ExcelWriter(workbook_buffer, engine='openpyxl') as writer:
            summary_df.to_excel(writer, sheet_name='Summary', index=False)
            detail_df.to_excel(writer, sheet_name='Details', index=False)
        workbook_buffer.seek(0)
        return RawData(data=workbook_buffer.read(), file_name=self.file_name)
|
|
32
|
+
|
|
33
|
+
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
import json
|
|
2
|
+
from datetime import datetime, timedelta, UTC
|
|
3
|
+
|
|
4
|
+
from pymongo.synchronous.database import Database
|
|
5
|
+
|
|
6
|
+
from bb_integrations_lib.protocols.pipelines import Step
|
|
7
|
+
from bb_integrations_lib.util.utils import CustomJSONEncoder
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class GetModelHistoryStep(Step):
    """Fetch successful model-run documents from the "model_history" collection.

    Depending on ``include_model_mode`` this returns either the single most
    recent successful run ("latest_only") or all successful runs within the
    last ``hours_back`` hours.
    """

    def __init__(self, mongo_database: Database, hours_back: float | None = None,
                 include_model_mode: str = "latest_only", *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.mongo_database = mongo_database
        self.hours_back = hours_back
        self.include_model_mode = include_model_mode

    def describe(self) -> str:
        return f"Get model history for {self.tenant_name}"

    async def execute(self, i: None) -> dict | list:
        # Bug fix: annotated str | list, but the "latest_only" branch returns
        # the full model document (a dict), which downstream steps subscript.
        if self.include_model_mode == "latest_only":
            return self.get_latest_model_id()
        if self.hours_back is not None:
            # hours_back is accepted as float but the Mongo window uses whole
            # hours, matching the original behavior.
            return self.get_last_n_models(int(self.hours_back))
        # Bug fix: the original message referred to "n_hours_back", which is
        # not the name of any configuration option; the parameter is
        # "hours_back".
        raise NotImplementedError("Please specify hours_back in the configuration")

    def get_latest_model_id(self) -> dict:
        """Return the most recent successful model document.

        Despite the historical method name, the full document is returned
        (downstream steps read ``_id`` and other fields from it); only the id
        string is stored in ``extra_data["latest_model_id"]``.

        Raises:
            ValueError: if no successful model run exists.
        """
        collection = self.mongo_database["model_history"]
        latest_model = collection.find_one(
            {'status': 'Success'},
            sort=[('time_ran', -1)]
        )
        # Bug fix: find_one returns None when nothing matches; the original
        # then crashed with an opaque TypeError on subscripting.
        if latest_model is None:
            raise ValueError("No successful model run found in model_history")
        latest_model_id = str(latest_model['_id'])
        self.pipeline_context.included_files[f'{self.__class__.__name__} result'] = json.dumps(latest_model, cls=CustomJSONEncoder)
        self.pipeline_context.extra_data["latest_model_id"] = latest_model_id
        return latest_model

    def get_last_n_models(self, n: int) -> list:
        """Return all successful model documents from the last *n* hours,
        newest first (may be empty)."""
        n_hours_ago = datetime.now(UTC) - timedelta(hours=n)
        collection = self.mongo_database["model_history"]
        models = collection.find(
            {'status': 'Success', 'time_ran': {'$gte': n_hours_ago}},
            sort=[('time_ran', -1)]
        )
        models = list(models)
        self.pipeline_context.included_files[f'{self.__class__.__name__} result'] = json.dumps(models, cls=CustomJSONEncoder)
        return models
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
from datetime import datetime
|
|
2
|
+
|
|
3
|
+
from loguru import logger
|
|
4
|
+
from pandas.core.interchange.dataframe_protocol import DataFrame
|
|
5
|
+
|
|
6
|
+
from bb_integrations_lib.gravitate.sd_api import GravitateSDAPI
|
|
7
|
+
from bb_integrations_lib.protocols.pipelines import Step
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class GetOrderBySiteProductStep(Step):
    """Export orders by site/product for one or more model runs via the SD API."""

    def __init__(self, sd_client: GravitateSDAPI, include_model_mode: str = "latest_only", state: str = "accepted",
                 start_date: datetime | None = None, end_date: datetime | None = None, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.sd_client = sd_client
        self.include_model_mode = include_model_mode
        self.state = state
        self.start_date = start_date
        self.end_date = end_date

    def describe(self) -> str:
        return "Get orders by site product"

    async def execute(self, latest_model: dict | list) -> str | DataFrame:
        """Fetch the site/product order export for a single model document or
        for each document in a list.

        Results also accumulate in ``extra_data["orders_by_site_product"]``.

        Raises:
            ValueError: if ``latest_model`` is neither a dict nor a list.
        """
        self.pipeline_context.extra_data.setdefault("orders_by_site_product", [])
        if isinstance(latest_model, dict):
            return await self.get_orders_in_model_id(str(latest_model["_id"]))
        elif isinstance(latest_model, list):
            return [await self.get_orders_in_model_id(str(model["_id"])) for model in latest_model]
        else:
            raise ValueError("latest_model must be a dict or a list of dicts")

    async def get_orders_in_model_id(self, model_id: str, market: str | None = None) -> str | DataFrame:
        """Call the export endpoint for one solver/model id and return either
        the raw CSV text or the custom-parsed result."""
        # NOTE(review): the original computed self.build_filter() here and
        # discarded the result — the request only filters on the solver id.
        # The dead call was removed; build_filter() is kept below in case a
        # caller relies on it. Confirm whether state/From/To were actually
        # meant to be sent with the request.
        json_data = {
            'filter': {"lp_relationship.solver_id": str(model_id)},
            'market': market if market else "",
        }

        response = await self.sd_client.call_ep(url="order/export_by_site_product",
                                                json=json_data)
        orders = response.content.decode("utf-8")
        if not hasattr(self, "custom_parser"):
            result = orders
        else:
            logger.info(f"Using custom parser for {self.__class__.__name__}")
            parser = self.custom_parser()
            result = await parser.parse(orders)
        self.pipeline_context.extra_data["orders_by_site_product"].append(result)
        return result

    def build_filter(self) -> dict:
        """Build an order filter from the configured state and date window."""
        # Renamed local so the builtin `filter` isn't shadowed.
        order_filter = {
            "state": self.state,
        }
        if self.start_date:
            order_filter["From"] = self.start_date.isoformat()
        if self.end_date:
            order_filter["To"] = self.end_date.isoformat()
        return order_filter
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
import json
|
|
2
|
+
from typing import Dict, List
|
|
3
|
+
|
|
4
|
+
from pymongo.synchronous.database import Database
|
|
5
|
+
|
|
6
|
+
from bb_integrations_lib.protocols.pipelines import Step
|
|
7
|
+
from bb_integrations_lib.util.utils import init_db, CustomJSONEncoder
|
|
8
|
+
from loguru import logger
|
|
9
|
+
from pandas import DataFrame
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class GetTankConfigsStep(Step):
    """Pull DOS-relevant tank configuration documents from Mongo, optionally
    post-processing them with a custom parser."""

    def __init__(self, mongo_database: Database, include_model_mode: str = "latest_only", *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.database = mongo_database
        self.include_model_mode = include_model_mode

    def describe(self) -> str:
        return f"Get tank configs for {self.tenant_name}"

    async def execute(self, i: None) -> str | DataFrame:
        return await self.get_tank_configs()

    async def get_tank_configs(self) -> List[Dict] | DataFrame:
        """Fetch every tank_config document projected to the DOS columns,
        record the raw result in the pipeline context, and return either the
        raw list or the custom-parsed form (also cached in extra_data)."""
        wanted_fields = ['store_number', 'product', 'daily_lifting_estimate', 'measured_inventory']
        projection = {field: 1 for field in wanted_fields}
        cursor = self.database["tank_config"].find({}, projection)
        tank_configs = list(cursor)
        self.pipeline_context.included_files[f'{self.__class__.__name__} result'] = json.dumps(tank_configs, cls=CustomJSONEncoder)
        if hasattr(self, "custom_parser"):
            logger.info(f"Using custom parser for {self.__class__.__name__}")
            tc = await self.custom_parser().parse(tank_configs)
        else:
            tc = tank_configs
        self.pipeline_context.extra_data["tank_configs"] = tc
        return tc
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
from datetime import datetime, UTC
|
|
2
|
+
from typing import Dict, Tuple
|
|
3
|
+
import pandas as pd
|
|
4
|
+
from bb_integrations_lib.protocols.pipelines import Step
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class JoinDistributionOrderDosStep(Step):
    """Join per-site-product order volumes with tank configs and aggregate
    contract/rack volume splits per DOS bucket.

    Accepts either a single model document (whose orders dataframe is stored
    in ``extra_data["orders_by_site_product"]``) or a list of models, in which
    case the per-model results are concatenated.
    """

    def __init__(self, client_name: str, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.client_name = client_name

    def describe(self) -> str:
        return "Join Distribution Order with DOS"

    async def execute(self, latest_model: dict | list) -> Tuple[pd.DataFrame, pd.DataFrame]:
        """Return (summary_df, detailed_df) for the given model(s)."""
        tc_df = self.pipeline_context.extra_data["tank_configs"]
        orders_data = self.pipeline_context.extra_data["orders_by_site_product"]

        if isinstance(latest_model, dict):
            # Single model: orders_data holds that model's orders dataframe.
            return self._aggregate_model(orders_data, tc_df, latest_model)

        # List of models: orders_data is parallel to latest_model.
        all_summaries = []
        all_details = []
        for i, model in enumerate(latest_model):
            df_summary, df_detailed = self._aggregate_model(orders_data[i], tc_df, model)
            all_summaries.append(df_summary)
            all_details.append(df_detailed)

        combined_summary = pd.concat(all_summaries, ignore_index=True)
        combined_detailed = pd.concat(all_details, ignore_index=True)
        return combined_summary, combined_detailed

    def _aggregate_model(self, pivot_df: pd.DataFrame, tc_df: pd.DataFrame,
                         model: dict) -> Tuple[pd.DataFrame, pd.DataFrame]:
        """Join one model's orders with tank configs and build the summary
        (per dos_bucket) and detailed (per dos_bucket/product) aggregates.

        Extracted because the original duplicated this logic verbatim in the
        single-model and multi-model branches.
        """
        df_joined = pd.merge(
            pivot_df,
            tc_df,
            how='left',
            left_on=['site', 'component_product'],
            right_on=['store_number', 'product'],
        )

        # Sites with no matching tank config land in a catch-all bucket.
        df_joined['dos_bucket'] = df_joined['dos_bucket'].fillna('N/A')
        df_summary = df_joined.groupby(['dos_bucket'])[
            ['component_volume_contract', 'component_volume_rack']].sum().reset_index()
        df_detailed = df_joined.groupby(['dos_bucket', 'component_product'])[
            ['component_volume_contract', 'component_volume_rack']].sum().reset_index()

        df_summary = self.contract_rack_split(df_summary, model)
        df_detailed = self.contract_rack_split(df_detailed, model)
        return df_summary, df_detailed

    def contract_rack_split(self, df: pd.DataFrame, latest_model: dict) -> pd.DataFrame:
        """Add contract/rack percentage columns and model/run metadata to *df*
        (modified in place and returned)."""
        markets = latest_model['markets']
        _id = str(latest_model['_id'])
        time_ran = latest_model['time_ran']

        df['total'] = df['component_volume_contract'] + df['component_volume_rack']
        # NOTE(review): a zero total yields NaN/inf percentages here, exactly
        # as in the original — confirm whether that can occur upstream.
        df['pct_contract'] = df['component_volume_contract'] / df['total']
        df['pct_rack'] = df['component_volume_rack'] / df['total']
        df['ingested_at'] = datetime.now(UTC).replace(tzinfo=None)
        df['markets'] = markets
        df['model_id'] = _id
        df['run_time'] = time_ran
        df['client_name'] = self.client_name
        return df
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
from typing import Dict, Tuple
|
|
2
|
+
import pandas as pd
|
|
3
|
+
from google.oauth2 import service_account
|
|
4
|
+
from loguru import logger
|
|
5
|
+
from bb_integrations_lib.protocols.pipelines import Step
|
|
6
|
+
from bb_integrations_lib.provider.gcp.model import GoogleCredential
|
|
7
|
+
from bb_integrations_lib.util.utils import load_credentials
|
|
8
|
+
import pandas_gbq
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class UploadDistributionReportToBigQuery(Step):
    """Append the distribution-report summary and detail frames to BigQuery.

    Parameters (constructor):
        gbq_table_details: destination table for the detail frame.
        gbq_table_summary: destination table for the summary frame.
        google_project_id: GCP project that owns both tables.
    """

    def __init__(self, gbq_table_details: str, gbq_table_summary: str, google_project_id: str, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.gbq_table_details = gbq_table_details
        self.gbq_table_summary = gbq_table_summary
        self.google_project_id = google_project_id

    def describe(self) -> str:
        return "Upload Distribution Report to GBQ"

    @property
    def credentials(self) -> GoogleCredential:
        # Loaded lazily each access; resolution details live in load_credentials.
        return load_credentials(credential_type="google.credentials")

    async def execute(self, data: Tuple[pd.DataFrame, pd.DataFrame]) -> None:
        """Append (summary, detail) frames to their configured tables.

        Raises:
            Exception: re-raised after logging if either upload fails.
        """
        credentials = service_account.Credentials.from_service_account_info(self.credentials.model_dump())
        df_summary, df_detailed = data
        try:
            pandas_gbq.to_gbq(
                df_summary,
                destination_table=self.gbq_table_summary,
                project_id=self.google_project_id,
                if_exists='append',
                credentials=credentials,
            )
            # Bug fix: the detail upload previously hardcoded
            # 'bb_reporting.contract_rack_util_product_detail', silently
            # ignoring the configured gbq_table_details parameter.
            pandas_gbq.to_gbq(
                df_detailed,
                destination_table=self.gbq_table_details,
                project_id=self.google_project_id,
                if_exists='append',
                credentials=credentials,
            )
        except Exception as e:
            logger.error(f"Failed to upload distribution report to BigQuery: {e}")
            raise e
|
|
46
|
+
|
|
47
|
+
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
from bb_integrations_lib.protocols.pipelines import Step, Input
|
|
2
|
+
from loguru import logger
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class EchoStep(Step[Input, Input]):
    """Pass-through step: logs its input at debug level and returns it unchanged."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def describe(self):
        return "Echo step input at debug priority"

    async def execute(self, i: Input) -> Input:
        # Log-and-forward; the payload is never modified.
        logger.debug(i)
        return i
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
from typing import Callable
|
|
2
|
+
|
|
3
|
+
import pandas as pd
|
|
4
|
+
import polars as pl
|
|
5
|
+
from bb_integrations_lib.protocols.pipelines import Step
|
|
6
|
+
from bb_integrations_lib.shared.model import RawData
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class ExportDataFrameToRawDataStep(Step):
    """Serialize a DataFrame via a named export method and wrap the result as RawData."""

    def __init__(self, pandas_export_function: str, pandas_export_kwargs: dict,
                 file_name: str | Callable[[], str], *args, **kwargs):
        """
        :param pandas_export_function: Name of the DataFrame export method to invoke
            (looked up with getattr on the incoming frame).
        :param pandas_export_kwargs: Keyword arguments forwarded to that method.
        :param file_name: Output file name, or a zero-argument callable producing one.
        """
        super().__init__(*args, **kwargs)
        self.pandas_export_function = pandas_export_function
        self.pandas_export_kwargs = pandas_export_kwargs
        self.file_name = file_name

    def _get_file_name(self) -> str:
        # A callable file_name is resolved lazily, at export time.
        return self.file_name() if callable(self.file_name) else self.file_name

    def describe(self) -> str:
        return "Export a DataFrame to a file wrapped in a RawData object"

    async def execute(self, i: pd.DataFrame | pl.DataFrame) -> RawData:
        export = getattr(i, self.pandas_export_function)
        payload = export(**self.pandas_export_kwargs)
        return RawData(data=payload.encode("utf-8"), file_name=self._get_file_name())
|
|
File without changes
|
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
from io import BytesIO
|
|
2
|
+
from loguru import logger
|
|
3
|
+
|
|
4
|
+
import pandas as pd
|
|
5
|
+
|
|
6
|
+
from bb_integrations_lib.gravitate.sd_api import GravitateSDAPI
|
|
7
|
+
from bb_integrations_lib.protocols.pipelines import Step, ParserBase, Input, Output
|
|
8
|
+
from bb_integrations_lib.shared.model import RawData
|
|
9
|
+
from pandas import DataFrame
|
|
10
|
+
from datetime import datetime, date, UTC
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class BBDExportPayrollStep(Step):
    """Export payroll data from Supply & Dispatch as a CSV RawData payload."""

    def __init__(self, sd_client: GravitateSDAPI, file_name: str, parser: type[ParserBase] | None = None,
                 parser_kwargs: dict | None = None, export_file: bool = True, target_date: str | None = None,
                 additional_ep_params: dict | None = None, *args, **kwargs):
        """
        Export a payroll file from S&D.

        :param sd_client: Authenticated S&D API client used for the export calls.
        :param file_name: Base name for the produced file; a timestamp suffix is appended.
        :param parser: Optional parser class applied to the XLSX export (file mode only).
        :param parser_kwargs: Extra keyword arguments for the parser constructor.
        :param export_file: If True, call the endpoint to export the file as an XLSX. If False, use the endpoint that
        provides JSON.
        :param target_date: Optional date string overriding the pipeline sync date.
        :param additional_ep_params: Extra query parameters for the JSON endpoint.
        """
        super().__init__(*args, **kwargs)
        self.sd_client = sd_client
        self.file_name = file_name
        self.export_file_only = export_file
        self.additional_endpoint_parameters = additional_ep_params or {
            "status": None,
            "driver_id": None,
            "updated_after": None,
        }
        if parser:
            # Only set when a parser is supplied; presence is checked via hasattr.
            self.custom_parser = parser
            self.custom_parser_kwargs = parser_kwargs or {}
        self.target_date = target_date

    def describe(self):
        return "Export Payroll from Supply and Dispatch"

    async def execute(self, last_sync_date: datetime | None = None) -> RawData:
        """Fetch payroll data (XLSX or JSON mode) and return it as CSV RawData."""
        last_sync_date = self.last_sync_date(last_sync_date)
        dt = datetime.now(UTC)
        if self.export_file_only is True:
            logger.info(f"Exporting payroll file only for {last_sync_date}")
            df = await self.export_payroll_file_only(last_sync_date)
        else:
            logger.info(f"Exporting payroll json for {last_sync_date}")
            df = await self.export_json(last_sync_date)
        # FIX: the strftime format now uses single quotes — reusing the outer
        # double quote inside an f-string is only valid on Python 3.12+ (PEP 701).
        return RawData(data=df.to_csv().encode("utf-8"), file_name=f"{self.file_name}_{dt.strftime('%Y%m%d%H%M%S')}")

    def last_sync_date(self, last_sync_date: datetime | None = None) -> str:
        """Resolve the effective sync date as an ISO-8601 string.

        Priority: explicit argument > configured target_date > pipeline
        max_sync date > midnight of today.
        """
        if last_sync_date is None:
            if self.target_date is not None:
                last_sync_date = self.target_date
            elif self.pipeline_context.max_sync is not None:
                last_sync_date = self.pipeline_context.max_sync.max_sync_date.isoformat()
            else:
                last_sync_date = datetime.combine(date.today(), datetime.min.time()).isoformat()
        else:
            last_sync_date = last_sync_date.isoformat()
        return last_sync_date

    async def export_payroll_file_only(self, dt: str) -> DataFrame:
        """Download the XLSX payroll export for *dt* and optionally run the custom parser.

        :param dt: ISO date string produced by last_sync_date(). (The previous
            ``datetime`` annotation was wrong — execute() always passes a str.)
        :raises Exception: If the export contains no rows.
        """
        resp = await self.sd_client.payroll_export_file(date=dt)
        df = pd.read_excel(BytesIO(resp.content),
                           dtype={"driver_source_id": str},
                           engine="openpyxl",
                           keep_default_na=False)
        if df.empty:
            raise Exception("No payroll data found")
        if hasattr(self, "custom_parser"):
            # NOTE(review): tenant_name is assumed to be provided by the Step
            # base / pipeline setup — it is never set in this class. Confirm.
            parser = self.custom_parser(tenant_name=self.tenant_name, **self.custom_parser_kwargs)
            df = await parser.parse(df)
            parser_logs = parser.get_logs()
            self.pipeline_context.included_files["parser_logs"] = parser_logs
        return df

    async def export_json(self, dt: str) -> DataFrame:
        """Fetch payroll records as JSON for *dt* and flatten them into a DataFrame.

        :param dt: ISO date string produced by last_sync_date().
        :raises Exception: If the export contains no rows.
        """
        resp = await self.sd_client.payroll_export(date=dt, **self.additional_endpoint_parameters)
        json_resp = resp.json()
        df = self.pre_parse_json(json_resp)
        if df.empty:
            raise Exception("No payroll data found")
        return df

    def pre_parse_json(self, json_resp: list) -> DataFrame:
        """Flatten payroll records with nested 'detail' rows into a flat DataFrame.

        Each header record is duplicated once per entry in its 'detail' list
        (or kept as a single row when there are no details). Known datetime
        columns are coerced with errors='coerce', so unparseable values become
        NaT. (Annotation fixed: the endpoint returns a list of records, not a
        dict — the loop iterates records, not keys.)
        """
        all_rows = []
        for payroll_record in json_resp:
            header_info = {k: v for k, v in payroll_record.items() if k != 'detail'}
            details = payroll_record.get('detail', [])
            if not details:
                all_rows.append(header_info)
            else:
                for detail in details:
                    row = header_info.copy()
                    row.update(detail)
                    all_rows.append(row)
        df = pd.DataFrame(all_rows)
        datetime_columns = [
            'start_date', 'end_date', 'updated', 'shift_start',
            'shift_actual_start', 'shift_actual_end', 'overridden_datetime'
        ]
        for col in datetime_columns:
            if col in df.columns:
                df[col] = pd.to_datetime(df[col], errors='coerce')

        return df
|