bb-integrations-library 3.0.11 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (217)
  1. bb_integrations_lib/__init__.py +0 -0
  2. bb_integrations_lib/converters/__init__.py +0 -0
  3. bb_integrations_lib/gravitate/__init__.py +0 -0
  4. bb_integrations_lib/gravitate/base_api.py +20 -0
  5. bb_integrations_lib/gravitate/model.py +29 -0
  6. bb_integrations_lib/gravitate/pe_api.py +122 -0
  7. bb_integrations_lib/gravitate/rita_api.py +552 -0
  8. bb_integrations_lib/gravitate/sd_api.py +572 -0
  9. bb_integrations_lib/gravitate/testing/TTE/sd/models.py +1398 -0
  10. bb_integrations_lib/gravitate/testing/TTE/sd/tests/test_models.py +2987 -0
  11. bb_integrations_lib/gravitate/testing/__init__.py +0 -0
  12. bb_integrations_lib/gravitate/testing/builder.py +55 -0
  13. bb_integrations_lib/gravitate/testing/openapi.py +70 -0
  14. bb_integrations_lib/gravitate/testing/util.py +274 -0
  15. bb_integrations_lib/mappers/__init__.py +0 -0
  16. bb_integrations_lib/mappers/prices/__init__.py +0 -0
  17. bb_integrations_lib/mappers/prices/model.py +106 -0
  18. bb_integrations_lib/mappers/prices/price_mapper.py +127 -0
  19. bb_integrations_lib/mappers/prices/protocol.py +20 -0
  20. bb_integrations_lib/mappers/prices/util.py +61 -0
  21. bb_integrations_lib/mappers/rita_mapper.py +523 -0
  22. bb_integrations_lib/models/__init__.py +0 -0
  23. bb_integrations_lib/models/dtn_supplier_invoice.py +487 -0
  24. bb_integrations_lib/models/enums.py +28 -0
  25. bb_integrations_lib/models/pipeline_structs.py +76 -0
  26. bb_integrations_lib/models/probe/probe_event.py +20 -0
  27. bb_integrations_lib/models/probe/request_data.py +431 -0
  28. bb_integrations_lib/models/probe/resume_token.py +7 -0
  29. bb_integrations_lib/models/rita/audit.py +113 -0
  30. bb_integrations_lib/models/rita/auth.py +30 -0
  31. bb_integrations_lib/models/rita/bucket.py +17 -0
  32. bb_integrations_lib/models/rita/config.py +188 -0
  33. bb_integrations_lib/models/rita/constants.py +19 -0
  34. bb_integrations_lib/models/rita/crossroads_entities.py +293 -0
  35. bb_integrations_lib/models/rita/crossroads_mapping.py +428 -0
  36. bb_integrations_lib/models/rita/crossroads_monitoring.py +78 -0
  37. bb_integrations_lib/models/rita/crossroads_network.py +41 -0
  38. bb_integrations_lib/models/rita/crossroads_rules.py +80 -0
  39. bb_integrations_lib/models/rita/email.py +39 -0
  40. bb_integrations_lib/models/rita/issue.py +63 -0
  41. bb_integrations_lib/models/rita/mapping.py +227 -0
  42. bb_integrations_lib/models/rita/probe.py +58 -0
  43. bb_integrations_lib/models/rita/reference_data.py +110 -0
  44. bb_integrations_lib/models/rita/source_system.py +9 -0
  45. bb_integrations_lib/models/rita/workers.py +76 -0
  46. bb_integrations_lib/models/sd/bols_and_drops.py +241 -0
  47. bb_integrations_lib/models/sd/get_order.py +301 -0
  48. bb_integrations_lib/models/sd/orders.py +18 -0
  49. bb_integrations_lib/models/sd_api.py +115 -0
  50. bb_integrations_lib/pipelines/__init__.py +0 -0
  51. bb_integrations_lib/pipelines/parsers/__init__.py +0 -0
  52. bb_integrations_lib/pipelines/parsers/distribution_report/__init__.py +0 -0
  53. bb_integrations_lib/pipelines/parsers/distribution_report/order_by_site_product_parser.py +50 -0
  54. bb_integrations_lib/pipelines/parsers/distribution_report/tank_configs_parser.py +47 -0
  55. bb_integrations_lib/pipelines/parsers/dtn/__init__.py +0 -0
  56. bb_integrations_lib/pipelines/parsers/dtn/dtn_price_parser.py +102 -0
  57. bb_integrations_lib/pipelines/parsers/dtn/model.py +79 -0
  58. bb_integrations_lib/pipelines/parsers/price_engine/__init__.py +0 -0
  59. bb_integrations_lib/pipelines/parsers/price_engine/parse_accessorials_prices_parser.py +67 -0
  60. bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/__init__.py +0 -0
  61. bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_merge_parser.py +111 -0
  62. bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_sync_parser.py +107 -0
  63. bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/shared.py +81 -0
  64. bb_integrations_lib/pipelines/parsers/tank_reading_parser.py +155 -0
  65. bb_integrations_lib/pipelines/parsers/tank_sales_parser.py +144 -0
  66. bb_integrations_lib/pipelines/shared/__init__.py +0 -0
  67. bb_integrations_lib/pipelines/shared/allocation_matching.py +227 -0
  68. bb_integrations_lib/pipelines/shared/bol_allocation.py +2793 -0
  69. bb_integrations_lib/pipelines/steps/__init__.py +0 -0
  70. bb_integrations_lib/pipelines/steps/create_accessorials_step.py +80 -0
  71. bb_integrations_lib/pipelines/steps/distribution_report/__init__.py +0 -0
  72. bb_integrations_lib/pipelines/steps/distribution_report/distribution_report_datafram_to_raw_data.py +33 -0
  73. bb_integrations_lib/pipelines/steps/distribution_report/get_model_history_step.py +50 -0
  74. bb_integrations_lib/pipelines/steps/distribution_report/get_order_by_site_product_step.py +62 -0
  75. bb_integrations_lib/pipelines/steps/distribution_report/get_tank_configs_step.py +40 -0
  76. bb_integrations_lib/pipelines/steps/distribution_report/join_distribution_order_dos_step.py +85 -0
  77. bb_integrations_lib/pipelines/steps/distribution_report/upload_distribution_report_datafram_to_big_query.py +47 -0
  78. bb_integrations_lib/pipelines/steps/echo_step.py +14 -0
  79. bb_integrations_lib/pipelines/steps/export_dataframe_to_rawdata_step.py +28 -0
  80. bb_integrations_lib/pipelines/steps/exporting/__init__.py +0 -0
  81. bb_integrations_lib/pipelines/steps/exporting/bbd_export_payroll_file_step.py +107 -0
  82. bb_integrations_lib/pipelines/steps/exporting/bbd_export_readings_step.py +236 -0
  83. bb_integrations_lib/pipelines/steps/exporting/cargas_wholesale_bundle_upload_step.py +33 -0
  84. bb_integrations_lib/pipelines/steps/exporting/dataframe_flat_file_export.py +29 -0
  85. bb_integrations_lib/pipelines/steps/exporting/gcs_bucket_export_file_step.py +34 -0
  86. bb_integrations_lib/pipelines/steps/exporting/keyvu_export_step.py +356 -0
  87. bb_integrations_lib/pipelines/steps/exporting/pe_price_export_step.py +238 -0
  88. bb_integrations_lib/pipelines/steps/exporting/platform_science_order_sync_step.py +500 -0
  89. bb_integrations_lib/pipelines/steps/exporting/save_rawdata_to_disk.py +15 -0
  90. bb_integrations_lib/pipelines/steps/exporting/sftp_export_file_step.py +60 -0
  91. bb_integrations_lib/pipelines/steps/exporting/sftp_export_many_files_step.py +23 -0
  92. bb_integrations_lib/pipelines/steps/exporting/update_exported_orders_table_step.py +64 -0
  93. bb_integrations_lib/pipelines/steps/filter_step.py +22 -0
  94. bb_integrations_lib/pipelines/steps/get_latest_sync_date.py +34 -0
  95. bb_integrations_lib/pipelines/steps/importing/bbd_import_payroll_step.py +30 -0
  96. bb_integrations_lib/pipelines/steps/importing/get_order_numbers_to_export_step.py +138 -0
  97. bb_integrations_lib/pipelines/steps/importing/load_file_to_dataframe_step.py +46 -0
  98. bb_integrations_lib/pipelines/steps/importing/load_imap_attachment_step.py +172 -0
  99. bb_integrations_lib/pipelines/steps/importing/pe_bulk_sync_price_structure_step.py +68 -0
  100. bb_integrations_lib/pipelines/steps/importing/pe_price_merge_step.py +86 -0
  101. bb_integrations_lib/pipelines/steps/importing/sftp_file_config_step.py +124 -0
  102. bb_integrations_lib/pipelines/steps/importing/test_exact_file_match.py +57 -0
  103. bb_integrations_lib/pipelines/steps/null_step.py +15 -0
  104. bb_integrations_lib/pipelines/steps/pe_integration_job_step.py +32 -0
  105. bb_integrations_lib/pipelines/steps/processing/__init__.py +0 -0
  106. bb_integrations_lib/pipelines/steps/processing/archive_gcs_step.py +76 -0
  107. bb_integrations_lib/pipelines/steps/processing/archive_sftp_step.py +48 -0
  108. bb_integrations_lib/pipelines/steps/processing/bbd_format_tank_readings_step.py +492 -0
  109. bb_integrations_lib/pipelines/steps/processing/bbd_upload_prices_step.py +54 -0
  110. bb_integrations_lib/pipelines/steps/processing/bbd_upload_tank_sales_step.py +124 -0
  111. bb_integrations_lib/pipelines/steps/processing/bbd_upload_tankreading_step.py +80 -0
  112. bb_integrations_lib/pipelines/steps/processing/convert_bbd_order_to_cargas_step.py +226 -0
  113. bb_integrations_lib/pipelines/steps/processing/delete_sftp_step.py +33 -0
  114. bb_integrations_lib/pipelines/steps/processing/dtn/__init__.py +2 -0
  115. bb_integrations_lib/pipelines/steps/processing/dtn/convert_dtn_invoice_to_sd_model.py +145 -0
  116. bb_integrations_lib/pipelines/steps/processing/dtn/parse_dtn_invoice_step.py +38 -0
  117. bb_integrations_lib/pipelines/steps/processing/file_config_parser_step.py +720 -0
  118. bb_integrations_lib/pipelines/steps/processing/file_config_parser_step_v2.py +418 -0
  119. bb_integrations_lib/pipelines/steps/processing/get_sd_price_price_request.py +105 -0
  120. bb_integrations_lib/pipelines/steps/processing/keyvu_upload_deliveryplan_step.py +39 -0
  121. bb_integrations_lib/pipelines/steps/processing/mark_orders_exported_in_bbd_step.py +185 -0
  122. bb_integrations_lib/pipelines/steps/processing/pe_price_rows_processing_step.py +174 -0
  123. bb_integrations_lib/pipelines/steps/processing/send_process_report_step.py +47 -0
  124. bb_integrations_lib/pipelines/steps/processing/sftp_renamer_step.py +61 -0
  125. bb_integrations_lib/pipelines/steps/processing/tank_reading_touchup_steps.py +75 -0
  126. bb_integrations_lib/pipelines/steps/processing/upload_supplier_invoice_step.py +16 -0
  127. bb_integrations_lib/pipelines/steps/send_attached_in_rita_email_step.py +44 -0
  128. bb_integrations_lib/pipelines/steps/send_rita_email_step.py +34 -0
  129. bb_integrations_lib/pipelines/steps/sleep_step.py +24 -0
  130. bb_integrations_lib/pipelines/wrappers/__init__.py +0 -0
  131. bb_integrations_lib/pipelines/wrappers/accessorials_transformation.py +104 -0
  132. bb_integrations_lib/pipelines/wrappers/distribution_report.py +191 -0
  133. bb_integrations_lib/pipelines/wrappers/export_tank_readings.py +237 -0
  134. bb_integrations_lib/pipelines/wrappers/import_tank_readings.py +192 -0
  135. bb_integrations_lib/pipelines/wrappers/wrapper.py +81 -0
  136. bb_integrations_lib/protocols/__init__.py +0 -0
  137. bb_integrations_lib/protocols/flat_file.py +210 -0
  138. bb_integrations_lib/protocols/gravitate_client.py +104 -0
  139. bb_integrations_lib/protocols/pipelines.py +697 -0
  140. bb_integrations_lib/provider/__init__.py +0 -0
  141. bb_integrations_lib/provider/api/__init__.py +0 -0
  142. bb_integrations_lib/provider/api/cargas/__init__.py +0 -0
  143. bb_integrations_lib/provider/api/cargas/client.py +43 -0
  144. bb_integrations_lib/provider/api/cargas/model.py +49 -0
  145. bb_integrations_lib/provider/api/cargas/protocol.py +23 -0
  146. bb_integrations_lib/provider/api/dtn/__init__.py +0 -0
  147. bb_integrations_lib/provider/api/dtn/client.py +128 -0
  148. bb_integrations_lib/provider/api/dtn/protocol.py +9 -0
  149. bb_integrations_lib/provider/api/keyvu/__init__.py +0 -0
  150. bb_integrations_lib/provider/api/keyvu/client.py +30 -0
  151. bb_integrations_lib/provider/api/keyvu/model.py +149 -0
  152. bb_integrations_lib/provider/api/macropoint/__init__.py +0 -0
  153. bb_integrations_lib/provider/api/macropoint/client.py +28 -0
  154. bb_integrations_lib/provider/api/macropoint/model.py +40 -0
  155. bb_integrations_lib/provider/api/pc_miler/__init__.py +0 -0
  156. bb_integrations_lib/provider/api/pc_miler/client.py +130 -0
  157. bb_integrations_lib/provider/api/pc_miler/model.py +6 -0
  158. bb_integrations_lib/provider/api/pc_miler/web_services_apis.py +131 -0
  159. bb_integrations_lib/provider/api/platform_science/__init__.py +0 -0
  160. bb_integrations_lib/provider/api/platform_science/client.py +147 -0
  161. bb_integrations_lib/provider/api/platform_science/model.py +82 -0
  162. bb_integrations_lib/provider/api/quicktrip/__init__.py +0 -0
  163. bb_integrations_lib/provider/api/quicktrip/client.py +52 -0
  164. bb_integrations_lib/provider/api/telapoint/__init__.py +0 -0
  165. bb_integrations_lib/provider/api/telapoint/client.py +68 -0
  166. bb_integrations_lib/provider/api/telapoint/model.py +178 -0
  167. bb_integrations_lib/provider/api/warren_rogers/__init__.py +0 -0
  168. bb_integrations_lib/provider/api/warren_rogers/client.py +207 -0
  169. bb_integrations_lib/provider/aws/__init__.py +0 -0
  170. bb_integrations_lib/provider/aws/s3/__init__.py +0 -0
  171. bb_integrations_lib/provider/aws/s3/client.py +126 -0
  172. bb_integrations_lib/provider/ftp/__init__.py +0 -0
  173. bb_integrations_lib/provider/ftp/client.py +140 -0
  174. bb_integrations_lib/provider/ftp/interface.py +273 -0
  175. bb_integrations_lib/provider/ftp/model.py +76 -0
  176. bb_integrations_lib/provider/imap/__init__.py +0 -0
  177. bb_integrations_lib/provider/imap/client.py +228 -0
  178. bb_integrations_lib/provider/imap/model.py +3 -0
  179. bb_integrations_lib/provider/sqlserver/__init__.py +0 -0
  180. bb_integrations_lib/provider/sqlserver/client.py +106 -0
  181. bb_integrations_lib/secrets/__init__.py +4 -0
  182. bb_integrations_lib/secrets/adapters.py +98 -0
  183. bb_integrations_lib/secrets/credential_models.py +222 -0
  184. bb_integrations_lib/secrets/factory.py +85 -0
  185. bb_integrations_lib/secrets/providers.py +160 -0
  186. bb_integrations_lib/shared/__init__.py +0 -0
  187. bb_integrations_lib/shared/exceptions.py +25 -0
  188. bb_integrations_lib/shared/model.py +1039 -0
  189. bb_integrations_lib/shared/shared_enums.py +510 -0
  190. bb_integrations_lib/storage/README.md +236 -0
  191. bb_integrations_lib/storage/__init__.py +0 -0
  192. bb_integrations_lib/storage/aws/__init__.py +0 -0
  193. bb_integrations_lib/storage/aws/s3.py +8 -0
  194. bb_integrations_lib/storage/defaults.py +72 -0
  195. bb_integrations_lib/storage/gcs/__init__.py +0 -0
  196. bb_integrations_lib/storage/gcs/client.py +8 -0
  197. bb_integrations_lib/storage/gcsmanager/__init__.py +0 -0
  198. bb_integrations_lib/storage/gcsmanager/client.py +8 -0
  199. bb_integrations_lib/storage/setup.py +29 -0
  200. bb_integrations_lib/util/__init__.py +0 -0
  201. bb_integrations_lib/util/cache/__init__.py +0 -0
  202. bb_integrations_lib/util/cache/custom_ttl_cache.py +75 -0
  203. bb_integrations_lib/util/cache/protocol.py +9 -0
  204. bb_integrations_lib/util/config/__init__.py +0 -0
  205. bb_integrations_lib/util/config/manager.py +391 -0
  206. bb_integrations_lib/util/config/model.py +41 -0
  207. bb_integrations_lib/util/exception_logger/__init__.py +0 -0
  208. bb_integrations_lib/util/exception_logger/exception_logger.py +146 -0
  209. bb_integrations_lib/util/exception_logger/test.py +114 -0
  210. bb_integrations_lib/util/utils.py +364 -0
  211. bb_integrations_lib/workers/__init__.py +0 -0
  212. bb_integrations_lib/workers/groups.py +13 -0
  213. bb_integrations_lib/workers/rpc_worker.py +50 -0
  214. bb_integrations_lib/workers/topics.py +20 -0
  215. bb_integrations_library-3.0.11.dist-info/METADATA +59 -0
  216. bb_integrations_library-3.0.11.dist-info/RECORD +217 -0
  217. bb_integrations_library-3.0.11.dist-info/WHEEL +4 -0
bb_integrations_lib/pipelines/steps/exporting/platform_science_order_sync_step.py
@@ -0,0 +1,500 @@
+ import asyncio
+ from datetime import datetime, UTC, timedelta
+ from typing import Any, Optional, Literal
+ from zoneinfo import ZoneInfo
+
+ from bson import ObjectId
+ from httpx import HTTPStatusError
+ from loguru import logger
+ from pydantic import BaseModel, ValidationError
+ from pymongo import MongoClient
+ from pymongo.synchronous.cursor import Cursor
+ from pymongo.synchronous.database import Database
+
+ from bb_integrations_lib.gravitate.sd_api import GravitateSDAPI
+ from bb_integrations_lib.protocols.pipelines import Step
+ from bb_integrations_lib.provider.api.platform_science.client import PlatformScienceClient
+ from bb_integrations_lib.provider.api.platform_science.model import JobDefinition, ShipmentDetails, ValueWithUnit, \
+     JobLocation, JobStep, JobTask, LoadDefinition, LoadEntity
+ from bb_integrations_lib.util.utils import lookup
+
+
+ class PlatSciLink(BaseModel):
+     """Platform Science linkage details to be stored in S&D order_v2 extra_data."""
+     job_id: str
+     completed: bool
+     last_order_state: str
+
+
+ class PlatformScienceOrderSyncStep(Step):
+     """
+     Export the current status of a Gravitate order to Platform Science workflow, either creating, updating, or
+     completing as necessary.
+     """
+
+     def __init__(
+         self,
+         sd_client: GravitateSDAPI,
+         psc: PlatformScienceClient,
+         mongo_database: Database,
+         order_nums: list[int] | None = None,
+         ps_link_key: str = "platform_science",
+         timezone: ZoneInfo = UTC,
+         *args, **kwargs
+     ):
+         super().__init__(*args, **kwargs)
+         self.sd_client = sd_client
+         self.psc = psc
+         self.db = mongo_database
+
+         self.order_nums = order_nums
+
+         self.loc_lkp: Optional[dict] = None
+         self.dt_lkp: Optional[dict] = None
+         self.ps_link_key = ps_link_key
+         self.timezone = timezone
+
+     def describe(self) -> str:
+         return "Update Platform Science workflow from Gravitate order status"
+
+     async def execute(self, i: Any) -> None:
+         if self.loc_lkp is None:
+             logger.info("Fetching locations from S&D")
+             locations = await self.sd_client.all_locations()
+             self.loc_lkp = lookup(locations.json(), lambda x: x["id"])
+
+         if self.order_nums:
+             logger.warning(f"Using provided order numbers for testing: {self.order_nums}")
+             changed_orders = []
+             for i, order_num in enumerate(self.order_nums):
+                 logger.info(f"Downloading order {order_num} ({i+1}/{len(self.order_nums)})")
+                 order_resp = await self.sd_client.get_orders(order_number=order_num)
+                 order_resp.raise_for_status()
+                 changed_orders.extend(order_resp.json())
+         else:
+             change_window = 60
+             logger.info(f"Fetching orders changed in the last {change_window} minutes from S&D")
+             changed_orders_resp = await self.sd_client.get_orders(
+                 last_change_date=datetime.now(tz=UTC) - timedelta(minutes=change_window)
+             )
+             changed_orders = changed_orders_resp.json()
+
+         if not changed_orders:
+             logger.info("No changed orders, stopping")
+             return
+
+         if self.dt_lkp is None:
+             self.dt_lkp = {}
+             driver_tracking = await self.sd_client.get_driver_tracking(
+                 order_numbers=[x["order_number"] for x in changed_orders])
+             for dt_item in driver_tracking.json():
+                 for ao in dt_item["assigned_orders"]:
+                     self.dt_lkp[ao["number"]] = dt_item
+
+         logger.info("Ordering orders by status: completions, updates, creations")
+         completions, updates, creations = self._categorize_orders(changed_orders)
+         logger.info(f"Processing {len(completions)} completions, {len(updates)} updates, {len(creations)} creations")
+         ordered_orders = completions + updates + creations
+
+         for order in ordered_orders:
+             try:
+                 with logger.contextualize(order_number=order["order_number"]):
+                     logger.info(f"Syncing order {order['order_number']}")
+                     if order["order_number"] in self.dt_lkp.keys():
+                         await self.sync_to_platform_science(order)
+                     else:
+                         logger.warning("No driver tracking record found for this order, skipping")
+             except HTTPStatusError as he:
+                 if he.response.status_code == 404:
+                     logger.warning("Got a 404 from PS syncing the order, resetting PS link.")
+                     self._clear_ps_link(order["order_id"])
+                     # Also have to clear out the extra_data field in the cached order object
+                     order.get("extra_data", {}).pop(self.ps_link_key, None)
+                     logger.info("Recreating PS order")
+                     try:
+                         await self.sync_to_platform_science(order)
+                     except HTTPStatusError as retry_he:
+                         logger.exception(f"Failed to recreate PS order after 404: {retry_he}: {retry_he.response.text}")
+                     except Exception as retry_e:
+                         logger.exception(f"Failed to recreate PS order after 404: {retry_e}")
+                 else:
+                     logger.exception(f"Failed to sync order {order['order_number']}: {he}: {he.response.text}")
+             except Exception as e:
+                 logger.exception(f"Failed to sync order {order['order_number']}: {e}")
+
+     async def sync_to_platform_science(self, order: dict) -> None:
+         # First, figure out if we have an existing PS workflow for this order
+         # If we do, update the workflow. If not, create a new one. If we do have a workflow and this order is now
+         # completed, attempt to complete the workflow.
+         grav_order_state = order["order_state"]
+         extra_data = order.get("extra_data", {})
+         ps_link_raw = extra_data.get(self.ps_link_key)
+         try:
+             ps_link = PlatSciLink.model_validate(ps_link_raw)
+         except ValidationError:
+             if ps_link_raw is not None:
+                 logger.warning(f"Malformed Platform Science link data from extra_data: {ps_link_raw}")
+             ps_link = None
+
+         driver = self._get_driver(order)
+         logger.info(f"For driver {driver}")
+
+         # Do we have good link data?
+         if ps_link:
+             # Is it updatable?
+             if not ps_link.completed:
+                 if grav_order_state != "complete":
+                     logger.info(f"Updating an existing PS workflow ({ps_link.job_id})")
+                     await self.update_existing_ps_workflow(driver, ps_link.job_id, order)
+                     if grav_order_state == "in progress" and ps_link.last_order_state != "in progress":
+                         logger.info("Order transitioned to in progress, creating load")
+                         try:
+                             await self.create_load(order, driver)
+                         except HTTPStatusError as he:
+                             logger.warning(f"{he.response.status_code}: {he.response.text}")
+                 else:
+                     logger.info(f"Completing an uncompleted PS workflow ({ps_link.job_id})")
+                     await self.complete_existing_ps_workflow(driver, ps_link.job_id, order)
+             else:
+                 logger.info("PS workflow already completed, cannot send further updates")
+         else:
+             if driver:
+                 logger.info("No PS workflow found, creating new")
+                 await self.create_new_ps_workflow(order, driver)
+                 if grav_order_state == "in progress":
+                     logger.info("New order is in progress, creating load")
+                     await self.create_load(order, driver)
+             else:
+                 logger.error("No driver on order - must have a driver to create in Platform Science")
+
+     @staticmethod
+     def order_is_pre_assign(order: dict) -> bool:
+         return order["order_state"] == "accepted"
+
+     async def create_load(self, order: dict, driver: str) -> None:
+         entities = [
+             LoadEntity(
+                 type="bill_of_lading",
+                 value=str(order["order_number"])
+             ),
+         ]
+         if trailer := order.get("trailer"):
+             entities.append(LoadEntity(
+                 type="trailer",
+                 value=str(trailer)
+             ))
+
+         start_date = datetime.now(self.timezone).date()
+         # Get the latest drop ETA for this order, or set end_date = start_date if there are no drops / ETAs
+         end_date = datetime.min
+         for drop in order["drops"]:
+             eta = drop.get("eta")
+             if eta:
+                 eta = datetime.fromisoformat(eta)
+                 end_date = max(end_date, eta)
+         if end_date == datetime.min:
+             end_date = start_date
+         else:
+             end_date = end_date.astimezone(self.timezone).date()
+
+         resp = await self.psc.create_load(
+             driver,
+             LoadDefinition(
+                 start_date=start_date,
+                 end_date=end_date,
+                 # TODO: Depends on this func getting called only when the order is completed
+                 user_external_id=driver,
+                 load=None,
+                 entities=entities
+             )
+         )
+         if resp.is_error:
+             logger.error(resp.json())
+         resp.raise_for_status()
+
+     async def create_new_ps_workflow(self, order: dict, driver: str) -> None:
+         resp = await self.psc.create_workflow_job(
+             driver,
+             job_definition=self._convert_grav_order_to_job_definition(
+                 order, self.loc_lkp, self.dt_lkp, pre_assign=self.order_is_pre_assign(order)
+             )
+         )
+         if resp.is_error:
+             logger.error(resp.json())
+         resp.raise_for_status()
+         body = resp.json()
+         logger.info(f"Created order, PS response {body}")
+         logger.info("Setting S&D extra_data")
+         job_id = str(body["data"]["job_id"])
+
+         order_completed = order["order_state"] == "complete"
+         if order_completed:
+             logger.info("Order already completed, completing PS workflow")
+             await self.complete_existing_ps_workflow(driver, job_id, order)
+         else:
+             self._save_ps_link(
+                 order["order_id"],
+                 PlatSciLink(job_id=job_id, completed=order_completed, last_order_state=order["order_state"])
+             )
+
+     async def update_existing_ps_workflow(self, driver_id: str, job_id: str, order: dict) -> None:
+         resp = await self.psc.update_workflow_job(
+             driver_id,
+             job_id,
+             self._convert_grav_order_to_job_definition(order, self.loc_lkp, self.dt_lkp,
+                                                        pre_assign=self.order_is_pre_assign(order))
+         )
+         resp.raise_for_status()
+         self._save_ps_link(
+             order["order_id"],
+             PlatSciLink(job_id=job_id, completed=False, last_order_state=order["order_state"])
+         )
+
+     async def complete_existing_ps_workflow(self, driver: str, ps_job_id: str, order: dict) -> None:
+         # last_change_date is okay here since the likely last update on the order was when it was completed, if this job
+         # runs soon after.
+         await self.update_existing_ps_workflow(driver, ps_job_id, order)
+         await self.psc.complete_workflow_job(ps_job_id, datetime.fromisoformat(order["last_change_date"]))
+         self._save_ps_link(
+             order["order_id"],
+             PlatSciLink(job_id=ps_job_id, completed=True, last_order_state=order["order_state"])
+         )
+
+     def _save_ps_link(self, order_id: str, psl: PlatSciLink):
+         ur = self.db["order_v2"].update_one(
+             filter={
+                 "_id": ObjectId(order_id)
+             },
+             update={
+                 "$set": {
+                     f"extra_data.{self.ps_link_key}": psl.model_dump(mode="json")
+                 }
+             }
+         )
+         logger.debug(f"Saved PS link: {psl}")
+
+     def _clear_ps_link(self, order_id: str):
+         ur = self.db["order_v2"].update_one(
+             filter={
+                 "_id": ObjectId(order_id)
+             },
+             update={
+                 "$unset": {
+                     f"extra_data.{self.ps_link_key}": ""
+                 }
+             }
+         )
+         logger.debug(f"Cleared PS link for order_id '{order_id}'")
+
+     @staticmethod
+     def _get_driver(order: dict) -> str | None:
+         drivers = order.get("drivers", [])
+         if not drivers:
+             logger.warning("No drivers found for order")
+             return None
+         if len(drivers) > 1:
+             # Sync job uses this, so it should match the external_id field in Platform Science
+             primary_driver = drivers[0]["username"]
+             logger.warning(f"More than one driver found for order, using first driver ({primary_driver})")
+         else:
+             primary_driver = drivers[0]["username"]
+         return primary_driver
+
+     def _bulk_get_ps_links(self, order_ids: list[str]) -> dict[str, Optional[PlatSciLink]]:
+         """Get PS links for a list of order ids"""
+         results = self.db["order_v2"].find(
+             filter={
+                 "_id": {
+                     "$in": [ObjectId(oid) for oid in order_ids]
+                 }
+             },
+             projection={
+                 "_id": 1,
+                 f"extra_data.{self.ps_link_key}": 1
+             }
+         )
+
+         ps_links = PlatformScienceOrderSyncStep._validate_ps_links_by_order_id(results, self.ps_link_key)
+         return ps_links
+
+     @staticmethod
+     def _validate_ps_links_by_order_id(results: Cursor, ps_link_key: str) -> dict[str, Optional[PlatSciLink]]:
+         ps_links = {}
+         for doc in results:
+             order_id = str(doc["_id"])
+             ps_link_raw = doc.get("extra_data", {}).get(ps_link_key)
+             try:
+                 ps_link = PlatSciLink.model_validate(ps_link_raw) if ps_link_raw else None
+             except ValidationError:
+                 logger.warning(f"Bad Platform Science link data for order {order_id}: {ps_link_raw}")
+                 ps_link = None
+             ps_links[order_id] = ps_link
+         return ps_links
+
+     def _categorize_orders(self, changed_orders: list[dict]) -> tuple[list[dict], list[dict], list[dict]]:
+         """
+         Categorize orders into completions, updates, and creations based on PS link status.
+         """
+         order_ids = [order["order_id"] for order in changed_orders]
+         ps_links = self._bulk_get_ps_links(order_ids)
+
+         completions = []
+         updates = []
+         creations = []
+
+         for order in changed_orders:
+             order_id = order["order_id"]
+             ps_link = ps_links.get(order_id)
+
+             if order["order_number"] not in self.dt_lkp.keys():
+                 logger.warning(
+                     f"No driver tracking record found for order {order['order_number']}, skipping order...")
+                 continue
+
+             if ps_link and not ps_link.completed:
+                 if order["order_state"] == "complete":
+                     completions.append(order)
+                 else:
+                     updates.append(order)
+             else:
+                 creations.append(order)
+
+         return completions, updates, creations
+
+     @staticmethod
+     def _convert_grav_order_to_job_definition(order: dict, loc_lkp: dict, dt_lkp: dict,
+                                               pre_assign: bool) -> JobDefinition:
+         loads = order["loads"]
+         drops = order["drops"]
+         return JobDefinition(
+             status="pre_assign" if pre_assign else "active",
+             external_id=str(order["order_number"]),
+             locations=[PlatformScienceOrderSyncStep._convert_to_job_location(load, loc_lkp)
+                        for load in loads] +
+                       [PlatformScienceOrderSyncStep._convert_to_job_location(drop, loc_lkp)
+                        for drop in drops],
+             steps=PlatformScienceOrderSyncStep._loads_and_drops_to_steps(order["loads"], order["drops"],
+                                                                          order["order_number"], dt_lkp),
+             shipment_details=ShipmentDetails(
+                 total_distance=ValueWithUnit(
+                     value=order["total_miles"],
+                     uom="miles"
+                 )
+             )
+         )
+
+     @staticmethod
+     def _convert_to_job_location(load_or_drop: dict, loc_lkp: dict) -> JobLocation:
+         """
+         Loads and drops have a very similar (identical?) structure, so we can reuse this method for either case.
+         Most of the data comes from the S&D location lookup anyway.
+         """
+         sd_loc = loc_lkp[load_or_drop["location_id"]]
+         return JobLocation(
+             external_id=load_or_drop["location_id"],  # TODO: Do we want to expose the location ID here?
+             name=load_or_drop["location_name"],
+             address=sd_loc["address"],
+             latitude=f"{sd_loc['lat']:.4f}",
+             longitude=f"{sd_loc['lon']:.4f}",
+             city=sd_loc["city"],
+             state=sd_loc["state"],
+             country_code="US",  # TODO: Don't hardcode this
+         )
+
+     @staticmethod
+     def _loads_and_drops_to_steps(loads: list[dict], drops: list[dict], order_number: int, dt_lkp: dict) -> list[
+             JobStep]:
+         dt_this = dt_lkp[order_number]
+         inc = 1
+         steps = []
+         for load in loads:
+             steps.append(
+                 PlatformScienceOrderSyncStep._convert_load_or_drop_to_job_step(load, inc, order_number, "Load",
+                                                                                dt_this))
+             inc += 1
+         for drop in drops:
+             steps.append(
+                 PlatformScienceOrderSyncStep._convert_load_or_drop_to_job_step(drop, inc, order_number, "Drop",
+                                                                                dt_this))
+             inc += 1
+         return steps
+
+     @staticmethod
+     def _convert_task(eid: str, product_name: str, task_order: int, load_or_drop: Literal["Load", "Drop"],
+                       completed: bool, completed_at: datetime) -> JobTask:
+         return JobTask(
+             remarks=[],
+             fields={},
+             id=str(task_order),
+             name=f"{load_or_drop} Product ({product_name})",
+             # What types are available and how do we use them? "arrived" is the only one I have verified so far
+             type="arrived",
+             completed=completed,
+             completed_at=completed_at,
+             external_id=eid,
+             order=task_order,
+             status="New",
+         )
+
+     @staticmethod
+     def _convert_load_or_drop_to_job_step(data: dict, step_order: int, order_number: int,
+                                           load_or_drop: Literal["Load", "Drop"], dt_this: dict) -> JobStep:
+         product_names = {x["product_name"] for x in data["details"]}
+         # Every detail that we care about can be represented by the first item in its group
+         detail_len = len(product_names)
+         detail_slug = f"{detail_len} product" + ("s" if detail_len > 1 else "")
+
+         completed = data["status"] == "complete"
+         completed_at = None
+         # Locate the update in the DT data, and filter to route events for this order specifically
+         for route in filter(lambda x: x["order_number"] == order_number, dt_this["route"]):
+             action_map = {
+                 "Load": "loading",
+                 "Drop": "dropping"
+             }
+             action_matches = action_map[load_or_drop] == route["action"]
+             destination = route["destination_id"]
+             if action_matches and destination == data["location_id"]:
+                 completed = route["complete"]
+                 if completed:
+                     completed_at = route["end_time"]
+                 else:
+                     completed_at = None
+                 break
+
+         return JobStep(
+             tasks=[
+                 PlatformScienceOrderSyncStep._convert_task(f"{order_number}-{step_order}-{task_order}", d, task_order,
+                                                            load_or_drop, completed, completed_at)
+                 for task_order, d in enumerate(product_names)
+             ],
+             order=step_order,
+             completed=completed,
+             completed_at=completed_at,
+             type="New",
+             name=f"{load_or_drop} {detail_slug}",
+             external_id=f"{order_number}-{step_order}",
+             location_external_id=data["location_id"]
+         )
+
+
+ if __name__ == "__main__":
+     async def main():
+         step = PlatformScienceOrderSyncStep(
+             sd_client=GravitateSDAPI(
+                 base_url="",
+                 client_id="",
+                 client_secret=""
+             ),
+             psc=PlatformScienceClient(
+                 base_url="",
+                 client_id="",
+                 client_secret=""
+             ),
+             order_nums=[],
+             mongo_database=MongoClient("mongodb conn str")["db_name"]
+         )
+         res = await step.execute(None)
+
+
+     asyncio.run(main())
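
For orientation, `_save_ps_link` above writes the validated `PlatSciLink` model under `extra_data.<ps_link_key>` on the S&D `order_v2` document. A minimal sketch of the resulting document fragment, with illustrative values only (the real order document carries many more fields):

# Hypothetical order_v2 fragment after _save_ps_link runs; values are illustrative.
linked_order = {
    "_id": "<ObjectId of the order>",
    "extra_data": {
        "platform_science": {            # default ps_link_key
            "job_id": "12345",           # Platform Science workflow job id
            "completed": False,
            "last_order_state": "in progress",
        }
    },
}

On the next run, `_bulk_get_ps_links` reads this fragment back to decide whether the order needs a create, an update, or a completion call.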
bb_integrations_lib/pipelines/steps/exporting/save_rawdata_to_disk.py
@@ -0,0 +1,15 @@
+ from bb_integrations_lib.protocols.pipelines import Step
+ from bb_integrations_lib.shared.model import RawData
+
+
+ class SaveRawDataToDiskStep(Step):
+     """Save a RawData object to disk in the current working directory."""
+     def __init__(self, *args, **kwargs):
+         super().__init__(*args, **kwargs)
+
+     def describe(self) -> str:
+         return "Save a RawData object to the current working directory"
+
+     async def execute(self, i: RawData) -> None:
+         with open(i.file_name, "wb") as f:
+             f.write(i.data)
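
A minimal usage sketch for the step above, assuming `RawData` can be constructed directly with the `file_name` and `data` attributes the step reads, and that the `Step` base constructor requires no arguments (neither definition appears in this diff):

import asyncio

from bb_integrations_lib.pipelines.steps.exporting.save_rawdata_to_disk import SaveRawDataToDiskStep
from bb_integrations_lib.shared.model import RawData


async def demo() -> None:
    # Hypothetical payload; writes ./readings.csv relative to the working directory.
    raw = RawData(file_name="readings.csv", data=b"site,volume\n1,5000\n")
    await SaveRawDataToDiskStep().execute(raw)


asyncio.run(demo())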
bb_integrations_lib/pipelines/steps/exporting/sftp_export_file_step.py
@@ -0,0 +1,60 @@
+ import os
+ from typing import Any, Dict
+
+ import pandas as pd
+
+ from bb_integrations_lib.models.pipeline_structs import BolExportResults, NoPipelineData
+ from bb_integrations_lib.protocols.pipelines import Step
+ from bb_integrations_lib.provider.ftp.client import FTPIntegrationClient
+ from bb_integrations_lib.secrets.credential_models import FTPCredential
+ from bb_integrations_lib.shared.model import FileReference, File, RawData, FileConfigRawData
+
+
+ class SFTPExportFileStep(Step):
+     def __init__(
+         self,
+         ftp_client: FTPIntegrationClient,
+         ftp_destination_dir: str,
+         field_sep: str = ",",
+         allow_empty: bool = False,
+         *args, **kwargs
+     ) -> None:
+         super().__init__(*args, **kwargs)
+         self.ftp_client = ftp_client
+         self.ftp_destination_dir = ftp_destination_dir
+         self.field_sep = field_sep
+         self.allow_empty = allow_empty
+
+     def describe(self) -> str:
+         return "SFTP File Export"
+
+     async def execute(self, i: FileReference | RawData | BolExportResults | FileConfigRawData) -> FileReference:
+         if isinstance(i, FileReference):
+             if i.is_empty and not self.allow_empty:
+                 raise NoPipelineData("File is empty")
+             file_name = os.path.basename(i.file_path)
+             with open(i.file_path, "rb") as f:
+                 file_data = f.read()
+             file = File(
+                 file_name=file_name,  # The sftp_client adds another CSV to the file name, so strip it off here.
+                 file_data=file_data,
+             )
+             self.ftp_client.upload_file(file, self.ftp_destination_dir)
+             return i
+         elif isinstance(i, RawData) or isinstance(i, FileConfigRawData):
+             if i.is_empty and not self.allow_empty:
+                 raise NoPipelineData("Data is empty")
+             file = File(
+                 file_name=i.file_name,
+                 file_data=i.data
+             )
+             self.ftp_client.upload_file(file, self.ftp_destination_dir)
+         elif isinstance(i, BolExportResults):
+             if i.is_empty and not self.allow_empty:
+                 raise NoPipelineData("No contents to export")
+             df = pd.DataFrame.from_records(i.orders)
+             csv_text = df.to_csv(index=False, sep=self.field_sep)
+             file = File(file_name=i.file_name, file_data=csv_text)
+             self.ftp_client.upload_file(file, self.ftp_destination_dir)
+         else:
+             raise NotImplementedError(f"Cannot export unknown file wrapper type {type(i)} to SFTP")
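
A hedged usage sketch of the dispatch above, assuming an already-constructed `FTPIntegrationClient` and a `RawData` payload (both defined elsewhere in the package); the remote directory and separator are placeholders:

async def export_raw_data(ftp_client, raw_data) -> None:
    step = SFTPExportFileStep(
        ftp_client=ftp_client,            # assumed FTPIntegrationClient instance
        ftp_destination_dir="/outbound",  # hypothetical remote directory
        field_sep="|",                    # only used by the BolExportResults branch
    )
    # RawData/FileConfigRawData payloads are uploaded as-is, FileReference inputs
    # are read from disk first, and BolExportResults is rendered to CSV via pandas.
    await step.execute(raw_data)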
bb_integrations_lib/pipelines/steps/exporting/sftp_export_many_files_step.py
@@ -0,0 +1,23 @@
+ from typing import Any, Dict, List, Union
+
+ from bb_integrations_lib.models.pipeline_structs import BolExportResults
+ from bb_integrations_lib.protocols.pipelines import Step
+ from bb_integrations_lib.shared.model import FileReference, RawData
+ from .sftp_export_file_step import SFTPExportFileStep
+
+
+ class SFTPExportManyFilesStep(Step):
+     def __init__(self, step_configuration, *args, **kwargs) -> None:
+         super().__init__(step_configuration)
+         self.sftp_export_step = SFTPExportFileStep(step_configuration)
+
+     def describe(self) -> str:
+         return "SFTP Many Files Export"
+
+     async def execute(self, files: List[Union[FileReference, RawData, BolExportResults]]) -> List[FileReference]:
+         results = []
+         for file in files:
+             result = await self.sftp_export_step.execute(file)
+             if result:
+                 results.append(result)
+         return results
bb_integrations_lib/pipelines/steps/exporting/update_exported_orders_table_step.py
@@ -0,0 +1,64 @@
+ import uuid
+ from datetime import datetime, UTC
+
+ from google.cloud import bigquery
+ from google.oauth2 import service_account
+ from loguru import logger
+
+ from bb_integrations_lib.models.pipeline_structs import BolExportResults
+ from bb_integrations_lib.protocols.pipelines import Step
+
+
+ class UpdateExportedOrdersTableStep(Step[BolExportResults, BolExportResults, None]):
+     def __init__(self, step_configuration):
+         super().__init__(step_configuration)
+         self.exported_order_table_name = step_configuration["exported_order_table_name"]
+         self.exported_order_errors_table_name = step_configuration["exported_order_errors_table_name"]
+         self.project_id = step_configuration["project_id"]
+         self.gcp_credentials_file = step_configuration["gcp_credentials_file"]
+         if not self.gcp_credentials_file.endswith(".json"):
+             self.gcp_credentials_file += ".json"
+
+     def describe(self) -> str:
+         return "Update the GCP bigquery table to include the newly-exported orders"
+
+     async def execute(self, results: BolExportResults) -> BolExportResults:
+         orders = results.orders
+         unique_order_numbers_exported = set(x[results.order_number_key] for x in orders)
+         now = datetime.now(UTC).isoformat().split("+")[0]
+         credentials = service_account.Credentials.from_service_account_file(self.gcp_credentials_file)
+         if len(unique_order_numbers_exported) > 0:
+             to_insert = [{
+                 "file_name": results.file_name,
+                 "date": now,
+                 "order_number": x
+             } for x in unique_order_numbers_exported]
+             logger.debug(f"Inserting {len(to_insert)} records for run {results.file_name}")
+             client = bigquery.Client(credentials=credentials)
+             gbq_errors = client.insert_rows_json(self.exported_order_table_name, to_insert)
+             if gbq_errors:
+                 logger.error(gbq_errors)
+             else:
+                 logger.debug(f"Updated {self.exported_order_table_name}")
+         else:
+             logger.debug("No exported orders to insert.")
+
+         errors_insert = [{
+             "id": str(uuid.uuid4()),
+             "export_id": results.file_name,
+             "order_number": str(x["order_number"]),
+             "error": x["error"],
+             "date": now
+         } for x in results.errors]
+         if len(errors_insert) > 0:
+             logger.debug(f"Inserting {len(errors_insert)} records for run {results.file_name}")
+             client = bigquery.Client(credentials=credentials)
+             gbq_errors = client.insert_rows_json(self.exported_order_errors_table_name, errors_insert)
+             if gbq_errors:
+                 logger.error(gbq_errors)
+             else:
+                 logger.debug(f"Updated {self.exported_order_errors_table_name}")
+         else:
+             logger.debug(f"No errors to insert.")
+
+         return results
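
The constructor above pulls its settings from a `step_configuration` mapping; a sketch of such a mapping, with hypothetical table IDs and file names:

step_configuration = {
    "exported_order_table_name": "my-gcp-project.exports.exported_orders",       # hypothetical
    "exported_order_errors_table_name": "my-gcp-project.exports.export_errors",  # hypothetical
    "project_id": "my-gcp-project",                                              # hypothetical
    "gcp_credentials_file": "service_account",  # ".json" is appended if missing
}
step = UpdateExportedOrdersTableStep(step_configuration)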
bb_integrations_lib/pipelines/steps/filter_step.py
@@ -0,0 +1,22 @@
+ from typing import Callable, AsyncIterable, TypeVar
+
+ from bb_integrations_lib.protocols.pipelines import GeneratorStep
+
+ T = TypeVar("T")
+
+
+ class FilterStep(GeneratorStep[T, T]):
+     """
+     A step that will yield incoming data if filter_func returns true when passed the incoming data.
+     """
+
+     def __init__(self, filter_func: Callable[[T], bool], *args, **kwargs):
+         super().__init__(*args, **kwargs)
+         self.filter_func = filter_func
+
+     def describe(self) -> str:
+         return "Filter step execution based on data"
+
+     async def generator(self, i: T) -> AsyncIterable[T]:
+         if self.filter_func(i):
+             yield i
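
A short sketch of the generator's behaviour, assuming the surrounding pipeline machinery simply iterates it and that the `GeneratorStep` base constructor needs no arguments (both are defined in `bb_integrations_lib/protocols/pipelines.py`, not in this diff):

import asyncio


async def demo() -> None:
    # Keep only completed orders; anything else yields nothing.
    step = FilterStep(filter_func=lambda order: order.get("order_state") == "complete")
    async for kept in step.generator({"order_number": 1, "order_state": "complete"}):
        print(kept)  # -> {'order_number': 1, 'order_state': 'complete'}


asyncio.run(demo())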