bb-integrations-library 3.0.11 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (217)
  1. bb_integrations_lib/__init__.py +0 -0
  2. bb_integrations_lib/converters/__init__.py +0 -0
  3. bb_integrations_lib/gravitate/__init__.py +0 -0
  4. bb_integrations_lib/gravitate/base_api.py +20 -0
  5. bb_integrations_lib/gravitate/model.py +29 -0
  6. bb_integrations_lib/gravitate/pe_api.py +122 -0
  7. bb_integrations_lib/gravitate/rita_api.py +552 -0
  8. bb_integrations_lib/gravitate/sd_api.py +572 -0
  9. bb_integrations_lib/gravitate/testing/TTE/sd/models.py +1398 -0
  10. bb_integrations_lib/gravitate/testing/TTE/sd/tests/test_models.py +2987 -0
  11. bb_integrations_lib/gravitate/testing/__init__.py +0 -0
  12. bb_integrations_lib/gravitate/testing/builder.py +55 -0
  13. bb_integrations_lib/gravitate/testing/openapi.py +70 -0
  14. bb_integrations_lib/gravitate/testing/util.py +274 -0
  15. bb_integrations_lib/mappers/__init__.py +0 -0
  16. bb_integrations_lib/mappers/prices/__init__.py +0 -0
  17. bb_integrations_lib/mappers/prices/model.py +106 -0
  18. bb_integrations_lib/mappers/prices/price_mapper.py +127 -0
  19. bb_integrations_lib/mappers/prices/protocol.py +20 -0
  20. bb_integrations_lib/mappers/prices/util.py +61 -0
  21. bb_integrations_lib/mappers/rita_mapper.py +523 -0
  22. bb_integrations_lib/models/__init__.py +0 -0
  23. bb_integrations_lib/models/dtn_supplier_invoice.py +487 -0
  24. bb_integrations_lib/models/enums.py +28 -0
  25. bb_integrations_lib/models/pipeline_structs.py +76 -0
  26. bb_integrations_lib/models/probe/probe_event.py +20 -0
  27. bb_integrations_lib/models/probe/request_data.py +431 -0
  28. bb_integrations_lib/models/probe/resume_token.py +7 -0
  29. bb_integrations_lib/models/rita/audit.py +113 -0
  30. bb_integrations_lib/models/rita/auth.py +30 -0
  31. bb_integrations_lib/models/rita/bucket.py +17 -0
  32. bb_integrations_lib/models/rita/config.py +188 -0
  33. bb_integrations_lib/models/rita/constants.py +19 -0
  34. bb_integrations_lib/models/rita/crossroads_entities.py +293 -0
  35. bb_integrations_lib/models/rita/crossroads_mapping.py +428 -0
  36. bb_integrations_lib/models/rita/crossroads_monitoring.py +78 -0
  37. bb_integrations_lib/models/rita/crossroads_network.py +41 -0
  38. bb_integrations_lib/models/rita/crossroads_rules.py +80 -0
  39. bb_integrations_lib/models/rita/email.py +39 -0
  40. bb_integrations_lib/models/rita/issue.py +63 -0
  41. bb_integrations_lib/models/rita/mapping.py +227 -0
  42. bb_integrations_lib/models/rita/probe.py +58 -0
  43. bb_integrations_lib/models/rita/reference_data.py +110 -0
  44. bb_integrations_lib/models/rita/source_system.py +9 -0
  45. bb_integrations_lib/models/rita/workers.py +76 -0
  46. bb_integrations_lib/models/sd/bols_and_drops.py +241 -0
  47. bb_integrations_lib/models/sd/get_order.py +301 -0
  48. bb_integrations_lib/models/sd/orders.py +18 -0
  49. bb_integrations_lib/models/sd_api.py +115 -0
  50. bb_integrations_lib/pipelines/__init__.py +0 -0
  51. bb_integrations_lib/pipelines/parsers/__init__.py +0 -0
  52. bb_integrations_lib/pipelines/parsers/distribution_report/__init__.py +0 -0
  53. bb_integrations_lib/pipelines/parsers/distribution_report/order_by_site_product_parser.py +50 -0
  54. bb_integrations_lib/pipelines/parsers/distribution_report/tank_configs_parser.py +47 -0
  55. bb_integrations_lib/pipelines/parsers/dtn/__init__.py +0 -0
  56. bb_integrations_lib/pipelines/parsers/dtn/dtn_price_parser.py +102 -0
  57. bb_integrations_lib/pipelines/parsers/dtn/model.py +79 -0
  58. bb_integrations_lib/pipelines/parsers/price_engine/__init__.py +0 -0
  59. bb_integrations_lib/pipelines/parsers/price_engine/parse_accessorials_prices_parser.py +67 -0
  60. bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/__init__.py +0 -0
  61. bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_merge_parser.py +111 -0
  62. bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_sync_parser.py +107 -0
  63. bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/shared.py +81 -0
  64. bb_integrations_lib/pipelines/parsers/tank_reading_parser.py +155 -0
  65. bb_integrations_lib/pipelines/parsers/tank_sales_parser.py +144 -0
  66. bb_integrations_lib/pipelines/shared/__init__.py +0 -0
  67. bb_integrations_lib/pipelines/shared/allocation_matching.py +227 -0
  68. bb_integrations_lib/pipelines/shared/bol_allocation.py +2793 -0
  69. bb_integrations_lib/pipelines/steps/__init__.py +0 -0
  70. bb_integrations_lib/pipelines/steps/create_accessorials_step.py +80 -0
  71. bb_integrations_lib/pipelines/steps/distribution_report/__init__.py +0 -0
  72. bb_integrations_lib/pipelines/steps/distribution_report/distribution_report_datafram_to_raw_data.py +33 -0
  73. bb_integrations_lib/pipelines/steps/distribution_report/get_model_history_step.py +50 -0
  74. bb_integrations_lib/pipelines/steps/distribution_report/get_order_by_site_product_step.py +62 -0
  75. bb_integrations_lib/pipelines/steps/distribution_report/get_tank_configs_step.py +40 -0
  76. bb_integrations_lib/pipelines/steps/distribution_report/join_distribution_order_dos_step.py +85 -0
  77. bb_integrations_lib/pipelines/steps/distribution_report/upload_distribution_report_datafram_to_big_query.py +47 -0
  78. bb_integrations_lib/pipelines/steps/echo_step.py +14 -0
  79. bb_integrations_lib/pipelines/steps/export_dataframe_to_rawdata_step.py +28 -0
  80. bb_integrations_lib/pipelines/steps/exporting/__init__.py +0 -0
  81. bb_integrations_lib/pipelines/steps/exporting/bbd_export_payroll_file_step.py +107 -0
  82. bb_integrations_lib/pipelines/steps/exporting/bbd_export_readings_step.py +236 -0
  83. bb_integrations_lib/pipelines/steps/exporting/cargas_wholesale_bundle_upload_step.py +33 -0
  84. bb_integrations_lib/pipelines/steps/exporting/dataframe_flat_file_export.py +29 -0
  85. bb_integrations_lib/pipelines/steps/exporting/gcs_bucket_export_file_step.py +34 -0
  86. bb_integrations_lib/pipelines/steps/exporting/keyvu_export_step.py +356 -0
  87. bb_integrations_lib/pipelines/steps/exporting/pe_price_export_step.py +238 -0
  88. bb_integrations_lib/pipelines/steps/exporting/platform_science_order_sync_step.py +500 -0
  89. bb_integrations_lib/pipelines/steps/exporting/save_rawdata_to_disk.py +15 -0
  90. bb_integrations_lib/pipelines/steps/exporting/sftp_export_file_step.py +60 -0
  91. bb_integrations_lib/pipelines/steps/exporting/sftp_export_many_files_step.py +23 -0
  92. bb_integrations_lib/pipelines/steps/exporting/update_exported_orders_table_step.py +64 -0
  93. bb_integrations_lib/pipelines/steps/filter_step.py +22 -0
  94. bb_integrations_lib/pipelines/steps/get_latest_sync_date.py +34 -0
  95. bb_integrations_lib/pipelines/steps/importing/bbd_import_payroll_step.py +30 -0
  96. bb_integrations_lib/pipelines/steps/importing/get_order_numbers_to_export_step.py +138 -0
  97. bb_integrations_lib/pipelines/steps/importing/load_file_to_dataframe_step.py +46 -0
  98. bb_integrations_lib/pipelines/steps/importing/load_imap_attachment_step.py +172 -0
  99. bb_integrations_lib/pipelines/steps/importing/pe_bulk_sync_price_structure_step.py +68 -0
  100. bb_integrations_lib/pipelines/steps/importing/pe_price_merge_step.py +86 -0
  101. bb_integrations_lib/pipelines/steps/importing/sftp_file_config_step.py +124 -0
  102. bb_integrations_lib/pipelines/steps/importing/test_exact_file_match.py +57 -0
  103. bb_integrations_lib/pipelines/steps/null_step.py +15 -0
  104. bb_integrations_lib/pipelines/steps/pe_integration_job_step.py +32 -0
  105. bb_integrations_lib/pipelines/steps/processing/__init__.py +0 -0
  106. bb_integrations_lib/pipelines/steps/processing/archive_gcs_step.py +76 -0
  107. bb_integrations_lib/pipelines/steps/processing/archive_sftp_step.py +48 -0
  108. bb_integrations_lib/pipelines/steps/processing/bbd_format_tank_readings_step.py +492 -0
  109. bb_integrations_lib/pipelines/steps/processing/bbd_upload_prices_step.py +54 -0
  110. bb_integrations_lib/pipelines/steps/processing/bbd_upload_tank_sales_step.py +124 -0
  111. bb_integrations_lib/pipelines/steps/processing/bbd_upload_tankreading_step.py +80 -0
  112. bb_integrations_lib/pipelines/steps/processing/convert_bbd_order_to_cargas_step.py +226 -0
  113. bb_integrations_lib/pipelines/steps/processing/delete_sftp_step.py +33 -0
  114. bb_integrations_lib/pipelines/steps/processing/dtn/__init__.py +2 -0
  115. bb_integrations_lib/pipelines/steps/processing/dtn/convert_dtn_invoice_to_sd_model.py +145 -0
  116. bb_integrations_lib/pipelines/steps/processing/dtn/parse_dtn_invoice_step.py +38 -0
  117. bb_integrations_lib/pipelines/steps/processing/file_config_parser_step.py +720 -0
  118. bb_integrations_lib/pipelines/steps/processing/file_config_parser_step_v2.py +418 -0
  119. bb_integrations_lib/pipelines/steps/processing/get_sd_price_price_request.py +105 -0
  120. bb_integrations_lib/pipelines/steps/processing/keyvu_upload_deliveryplan_step.py +39 -0
  121. bb_integrations_lib/pipelines/steps/processing/mark_orders_exported_in_bbd_step.py +185 -0
  122. bb_integrations_lib/pipelines/steps/processing/pe_price_rows_processing_step.py +174 -0
  123. bb_integrations_lib/pipelines/steps/processing/send_process_report_step.py +47 -0
  124. bb_integrations_lib/pipelines/steps/processing/sftp_renamer_step.py +61 -0
  125. bb_integrations_lib/pipelines/steps/processing/tank_reading_touchup_steps.py +75 -0
  126. bb_integrations_lib/pipelines/steps/processing/upload_supplier_invoice_step.py +16 -0
  127. bb_integrations_lib/pipelines/steps/send_attached_in_rita_email_step.py +44 -0
  128. bb_integrations_lib/pipelines/steps/send_rita_email_step.py +34 -0
  129. bb_integrations_lib/pipelines/steps/sleep_step.py +24 -0
  130. bb_integrations_lib/pipelines/wrappers/__init__.py +0 -0
  131. bb_integrations_lib/pipelines/wrappers/accessorials_transformation.py +104 -0
  132. bb_integrations_lib/pipelines/wrappers/distribution_report.py +191 -0
  133. bb_integrations_lib/pipelines/wrappers/export_tank_readings.py +237 -0
  134. bb_integrations_lib/pipelines/wrappers/import_tank_readings.py +192 -0
  135. bb_integrations_lib/pipelines/wrappers/wrapper.py +81 -0
  136. bb_integrations_lib/protocols/__init__.py +0 -0
  137. bb_integrations_lib/protocols/flat_file.py +210 -0
  138. bb_integrations_lib/protocols/gravitate_client.py +104 -0
  139. bb_integrations_lib/protocols/pipelines.py +697 -0
  140. bb_integrations_lib/provider/__init__.py +0 -0
  141. bb_integrations_lib/provider/api/__init__.py +0 -0
  142. bb_integrations_lib/provider/api/cargas/__init__.py +0 -0
  143. bb_integrations_lib/provider/api/cargas/client.py +43 -0
  144. bb_integrations_lib/provider/api/cargas/model.py +49 -0
  145. bb_integrations_lib/provider/api/cargas/protocol.py +23 -0
  146. bb_integrations_lib/provider/api/dtn/__init__.py +0 -0
  147. bb_integrations_lib/provider/api/dtn/client.py +128 -0
  148. bb_integrations_lib/provider/api/dtn/protocol.py +9 -0
  149. bb_integrations_lib/provider/api/keyvu/__init__.py +0 -0
  150. bb_integrations_lib/provider/api/keyvu/client.py +30 -0
  151. bb_integrations_lib/provider/api/keyvu/model.py +149 -0
  152. bb_integrations_lib/provider/api/macropoint/__init__.py +0 -0
  153. bb_integrations_lib/provider/api/macropoint/client.py +28 -0
  154. bb_integrations_lib/provider/api/macropoint/model.py +40 -0
  155. bb_integrations_lib/provider/api/pc_miler/__init__.py +0 -0
  156. bb_integrations_lib/provider/api/pc_miler/client.py +130 -0
  157. bb_integrations_lib/provider/api/pc_miler/model.py +6 -0
  158. bb_integrations_lib/provider/api/pc_miler/web_services_apis.py +131 -0
  159. bb_integrations_lib/provider/api/platform_science/__init__.py +0 -0
  160. bb_integrations_lib/provider/api/platform_science/client.py +147 -0
  161. bb_integrations_lib/provider/api/platform_science/model.py +82 -0
  162. bb_integrations_lib/provider/api/quicktrip/__init__.py +0 -0
  163. bb_integrations_lib/provider/api/quicktrip/client.py +52 -0
  164. bb_integrations_lib/provider/api/telapoint/__init__.py +0 -0
  165. bb_integrations_lib/provider/api/telapoint/client.py +68 -0
  166. bb_integrations_lib/provider/api/telapoint/model.py +178 -0
  167. bb_integrations_lib/provider/api/warren_rogers/__init__.py +0 -0
  168. bb_integrations_lib/provider/api/warren_rogers/client.py +207 -0
  169. bb_integrations_lib/provider/aws/__init__.py +0 -0
  170. bb_integrations_lib/provider/aws/s3/__init__.py +0 -0
  171. bb_integrations_lib/provider/aws/s3/client.py +126 -0
  172. bb_integrations_lib/provider/ftp/__init__.py +0 -0
  173. bb_integrations_lib/provider/ftp/client.py +140 -0
  174. bb_integrations_lib/provider/ftp/interface.py +273 -0
  175. bb_integrations_lib/provider/ftp/model.py +76 -0
  176. bb_integrations_lib/provider/imap/__init__.py +0 -0
  177. bb_integrations_lib/provider/imap/client.py +228 -0
  178. bb_integrations_lib/provider/imap/model.py +3 -0
  179. bb_integrations_lib/provider/sqlserver/__init__.py +0 -0
  180. bb_integrations_lib/provider/sqlserver/client.py +106 -0
  181. bb_integrations_lib/secrets/__init__.py +4 -0
  182. bb_integrations_lib/secrets/adapters.py +98 -0
  183. bb_integrations_lib/secrets/credential_models.py +222 -0
  184. bb_integrations_lib/secrets/factory.py +85 -0
  185. bb_integrations_lib/secrets/providers.py +160 -0
  186. bb_integrations_lib/shared/__init__.py +0 -0
  187. bb_integrations_lib/shared/exceptions.py +25 -0
  188. bb_integrations_lib/shared/model.py +1039 -0
  189. bb_integrations_lib/shared/shared_enums.py +510 -0
  190. bb_integrations_lib/storage/README.md +236 -0
  191. bb_integrations_lib/storage/__init__.py +0 -0
  192. bb_integrations_lib/storage/aws/__init__.py +0 -0
  193. bb_integrations_lib/storage/aws/s3.py +8 -0
  194. bb_integrations_lib/storage/defaults.py +72 -0
  195. bb_integrations_lib/storage/gcs/__init__.py +0 -0
  196. bb_integrations_lib/storage/gcs/client.py +8 -0
  197. bb_integrations_lib/storage/gcsmanager/__init__.py +0 -0
  198. bb_integrations_lib/storage/gcsmanager/client.py +8 -0
  199. bb_integrations_lib/storage/setup.py +29 -0
  200. bb_integrations_lib/util/__init__.py +0 -0
  201. bb_integrations_lib/util/cache/__init__.py +0 -0
  202. bb_integrations_lib/util/cache/custom_ttl_cache.py +75 -0
  203. bb_integrations_lib/util/cache/protocol.py +9 -0
  204. bb_integrations_lib/util/config/__init__.py +0 -0
  205. bb_integrations_lib/util/config/manager.py +391 -0
  206. bb_integrations_lib/util/config/model.py +41 -0
  207. bb_integrations_lib/util/exception_logger/__init__.py +0 -0
  208. bb_integrations_lib/util/exception_logger/exception_logger.py +146 -0
  209. bb_integrations_lib/util/exception_logger/test.py +114 -0
  210. bb_integrations_lib/util/utils.py +364 -0
  211. bb_integrations_lib/workers/__init__.py +0 -0
  212. bb_integrations_lib/workers/groups.py +13 -0
  213. bb_integrations_lib/workers/rpc_worker.py +50 -0
  214. bb_integrations_lib/workers/topics.py +20 -0
  215. bb_integrations_library-3.0.11.dist-info/METADATA +59 -0
  216. bb_integrations_library-3.0.11.dist-info/RECORD +217 -0
  217. bb_integrations_library-3.0.11.dist-info/WHEEL +4 -0
bb_integrations_lib/pipelines/steps/processing/mark_orders_exported_in_bbd_step.py
@@ -0,0 +1,185 @@
+ from datetime import datetime, UTC
+ from enum import StrEnum
+ from time import monotonic
+ from typing import Any
+
+ import tenacity
+ from loguru import logger
+ from pymongo import UpdateOne
+ from tenacity import retry, wait_exponential
+
+ from bb_integrations_lib.gravitate.sd_api import GravitateSDAPI
+ from bb_integrations_lib.models.pipeline_structs import BolExportResults
+ from bb_integrations_lib.models.sd.orders import BackofficeERP, ERPStatus
+ from bb_integrations_lib.protocols.pipelines import Step
+ from bb_integrations_lib.util.utils import init_db
+
+
+ class TooManyRequests(Exception):
+     pass
+
+
+ class MarkOrdersExportedMethod(StrEnum):
+     API = "api"
+     BACKHAUL_API = "backhaul_api"
+     DB = "db"
+
+
+ class MarkOrdersExportedInBBDStep(Step):
+     """
+     This step marks BolExportResults orders as exported in the order movements page.
+
+     It supports two marking methods:
+     1. API method: uses the S&D API and runs an __always_succeed backoffice_erp function on every order. This is
+        the default, and very slow.
+     2. DB method: directly modifies the S&D DB. Much faster, but subject to S&D schema changes.
+
+     Note that ``BolExportResults.errors`` must be a list of dicts, each in the form
+     ``{"order_number": int, "error": str}``.
+
+     **Limitations**:
+     If using the API method, the __always_succeed function must be set in the target S&D tenant. Error messages
+     cannot be set and will be discarded.
+     """
+
+     def __init__(self, order_number_key: str, export_method: MarkOrdersExportedMethod = MarkOrdersExportedMethod.API,
+                  export_function_name: str | None = None, sd_client: GravitateSDAPI | None = None,
+                  mongodb_conn_str: str | None = None, mongodb_db_name: str | None = None, *args, **kwargs):
+         super().__init__(*args, **kwargs)
+         self.export_method = export_method
+         self.function_name = export_function_name
+         self.order_number_key = order_number_key
+
+         self.sd_client = sd_client
+         self.mongodb_conn_str = mongodb_conn_str
+         self.mongodb_db_name = mongodb_db_name
+
+         match self.export_method:
+             case MarkOrdersExportedMethod.API | MarkOrdersExportedMethod.BACKHAUL_API:
+                 logger.debug(f"Marking orders exported using {self.export_method} method")
+             case MarkOrdersExportedMethod.DB:
+                 logger.debug("Marking orders exported using direct DB access method")
+
+     def describe(self) -> str:
+         return "Mark orders as exported in the order movements page"
+
+     async def execute(self, i: BolExportResults) -> BolExportResults:
+         unique_orders = list({int(order[self.order_number_key]) for order in i.orders})
+         match self.export_method:
+             case MarkOrdersExportedMethod.API | MarkOrdersExportedMethod.BACKHAUL_API:
+                 await self._mark_all_exported_api(unique_orders)
+             case MarkOrdersExportedMethod.DB:
+                 await self._mark_exported_db(unique_orders, i.errors)
+
+         return i
+
+     async def _mark_all_exported_api(self, unique_orders: list[int]):
+         start_time = monotonic()
+         n_orders = len(unique_orders)
+         for index, number in enumerate(unique_orders):
+             try:
+                 if self.export_method == MarkOrdersExportedMethod.API:
+                     await self._mark_exported_api(number)
+                 else:
+                     await self._mark_exported_backhaul_api(number)
+                 elapsed_time = monotonic() - start_time
+                 logger.info(
+                     f"Marked order {number} ({index + 1} of {n_orders}, {(index + 1) / n_orders:.0%}). "
+                     f"Total elapsed: {int(elapsed_time)}s")
+             except Exception as e:
+                 logger.warning(f"Unable to mark order {number} as exported. {e}")
+
+         logger.info(f"Finished processing {n_orders} orders in {int(monotonic() - start_time)}s")
+
+     @retry(
+         retry=tenacity.retry_if_exception_type(TooManyRequests),
+         wait=wait_exponential(multiplier=1, min=5, max=30),
+         stop=tenacity.stop_after_delay(60),
+     )
+     async def _mark_exported_api(self, number: int):
+         """
+         Use the S&D API to mark a single order as exported with an __always_succeed function.
+         Less reliable and much slower than the DB method, and requires the __always_succeed function to be set
+         in the target S&D tenant.
+         """
+         resp = await self.sd_client.export_single_order(number, self.function_name)
+         logger.debug(f"Order {number} response status code: {resp.status_code}")
+         if resp.status_code == 429:
+             raise TooManyRequests()
+         resp.raise_for_status()
+         resp_json = resp.json()
+         if len(resp_json) == 0:
+             raise Exception(resp_json)
+
+     @retry(
+         retry=tenacity.retry_if_exception_type(TooManyRequests),
+         wait=wait_exponential(multiplier=1, min=5, max=30),
+         stop=tenacity.stop_after_delay(60),
+     )
+     async def _mark_exported_backhaul_api(self, number: int):
+         """Use the S&D API to mark a single backhaul order as exported."""
+         resp = await self.sd_client.mark_backhaul_exported(number)
+         logger.debug(f"Order {number} response status code: {resp.status_code}")
+         if resp.status_code == 429:
+             raise TooManyRequests()
+         resp.raise_for_status()
+         resp_json = resp.json()
+         if len(resp_json) == 0:
+             raise Exception(resp_json)
+
+     async def _mark_exported_db(self, unique_orders: list[int], errors: list[dict[str, Any]]):
+         """
+         Mark multiple orders as exported by directly updating the S&D DB. This avoids the reliability and
+         performance issues of the API approach, and supports setting error text to be shown in the UI, but is
+         at the mercy of the DB schema changing underneath us.
+
+         :param unique_orders: List of order numbers to mark as exported / "sent"
+         :param errors: List of errors to mark as failed, each in the format ``{"order_number": int, "error": str}``
+         """
+         sent_erp_obj = BackofficeERP(status=ERPStatus.sent, time_sent=datetime.now(UTC), errors=[]).model_dump()
+         with init_db(self.mongodb_conn_str, self.mongodb_db_name) as db:
+             # Mark successful exports
+             if len(unique_orders) > 0:
+                 mark_sent_result = db["order_v2"].update_many(
+                     filter={"number": {"$in": unique_orders}},
+                     update={"$set": {"backoffice_erp": sent_erp_obj}})
+                 logger.info(f"Marked {mark_sent_result.modified_count} orders as exported")
+             else:
+                 logger.info("No successfully exported orders to mark")
+
+             # Mark failed exports with error messages
+             update_list = [
+                 UpdateOne(
+                     filter={"number": error["order_number"]},
+                     update={"$set": {
+                         "backoffice_erp": self._error_to_backoffice_erp_obj(error["error"]).model_dump()
+                     }})
+                 for error in errors
+             ]
+             if len(update_list) > 0:
+                 mark_failed_result = db["order_v2"].bulk_write(update_list)
+                 logger.info(f"Marked {mark_failed_result.modified_count} orders as failed to export")
+             else:
+                 logger.info("No export failures to mark")
+
+     @staticmethod
+     def _error_to_backoffice_erp_obj(error_message: str) -> BackofficeERP:
+         return BackofficeERP(status=ERPStatus.errors, time_sent=datetime.now(UTC), errors=[error_message])
+
+
+ if __name__ == "__main__":
+     import asyncio
+
+     async def main():
+         order_nums = [10000]
+         orders = [{"OrderNumber": x} for x in order_nums]
+
+         s = MarkOrdersExportedInBBDStep(
+             order_number_key="OrderNumber",
+             export_function_name="__always_succeed",
+             export_method=MarkOrdersExportedMethod.API,
+             sd_client=GravitateSDAPI(
+                 username="",
+                 password="",
+                 base_url="",
+             ))
+         await s.execute(BolExportResults(orders=orders, errors=[], file_name="oneoff", order_number_key="OrderNumber"))
+
+     asyncio.run(main())
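
The file's own __main__ block exercises the API method; a minimal sketch of the DB method looks similar. The connection string, database name, and order data below are placeholders for illustration, not values shipped with the package:

    import asyncio

    step = MarkOrdersExportedInBBDStep(
        order_number_key="OrderNumber",
        export_method=MarkOrdersExportedMethod.DB,
        mongodb_conn_str="mongodb://localhost:27017",  # placeholder
        mongodb_db_name="sd",                          # placeholder
    )
    results = BolExportResults(
        orders=[{"OrderNumber": 10000}],
        errors=[{"order_number": 10001, "error": "Rejected by ERP"}],  # error text shown in the UI
        file_name="oneoff",
        order_number_key="OrderNumber",
    )
    asyncio.run(step.execute(results))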
bb_integrations_lib/pipelines/steps/processing/pe_price_rows_processing_step.py
@@ -0,0 +1,174 @@
+ import copy
+ from typing import Dict, List, Tuple
+
+ from loguru import logger
+
+ from bb_integrations_lib.mappers.prices.model import Groups, PricingIntegrationConfig
+ from bb_integrations_lib.mappers.prices.price_mapper import PriceMapper
+ from bb_integrations_lib.protocols.pipelines import Step
+ from bb_integrations_lib.shared.exceptions import MappingNotFoundException
+ from bb_integrations_lib.shared.model import PEPriceData, SupplyPriceUpdateManyRequest
+
+
+ class PEParsePricesStep(Step):
+     def __init__(self, config: PricingIntegrationConfig, *args, **kwargs):
+         super().__init__(*args, **kwargs)
+
+         self.config = config
+         self.pm = PriceMapper(
+             ttl=self.config.price_mapper_ttl,
+             debug_mode=self.config.price_mapper_debug_mode,
+             config=self.config,
+         )
+
+     async def execute(self, rows: List[PEPriceData]) -> Tuple[List[SupplyPriceUpdateManyRequest], Dict]:
+         return await self.process_rows(rows)
+
+     def get_price_request(self, rows: List[PEPriceData]) -> List[SupplyPriceUpdateManyRequest]:
+         res: List[SupplyPriceUpdateManyRequest] = []
+         for row in rows:
+             price = row.CurvePointPrices[0].Value
+             res.append(
+                 SupplyPriceUpdateManyRequest(
+                     source_id=self.config.source_system_id,
+                     source_system_id=self.config.source_system,
+                     terminal_source_id=row.LocationSourceId if self.config.use_source_system_id else row.LocationSourceIdString,
+                     terminal_source_system_id=row.LocationSourceId,
+                     effective_from=row.EffectiveFromDateTime,
+                     effective_to=row.EffectiveToDateTime,
+                     expire=row.ExpirationDateTime,
+                     price=price,
+                     price_type="contract",
+                     product_source_id=row.ProductSourceId if self.config.use_source_system_id else row.ProductSourceIdString,
+                     product_source_system_id=row.ProductSourceId,
+                     supplier_source_id=row.SupplierSourceId if self.config.use_source_system_id else row.SupplierSourceIdString,
+                     supplier_source_system_id=row.SupplierSourceId,
+                     timezone="America/Chicago",
+                     curve_id=row.CurveId,
+                     contract=row.SourceContractId,
+                 )
+             )
+         return res
+
+     def get_factor(self, **kwargs):
+         """Multiply the given keyword argument values together."""
+         result = 1
+         for value in kwargs.values():
+             result *= value
+         return result
+
+     async def process_rows(self, rows: List[PEPriceData]) -> Tuple[List[SupplyPriceUpdateManyRequest], Dict]:
+         mappings = await self.get_mapping_groups()
+         new_rows = self.get_price_request(rows)
+         product_mappings = mappings.product_groups
+         location_mappings = mappings.location_groups
+         supplier_mappings = mappings.supplier_groups
+         processed_rows = []
+         error_dict = {}
+
+         for idx, row in enumerate(new_rows):
+             try:
+                 terminals = location_mappings.get(row.terminal_source_id)
+                 if terminals is None:
+                     raise MappingNotFoundException(f"Missing terminal mapping for source_id: {row.terminal_source_id}")
+
+                 suppliers = supplier_mappings.get(row.supplier_source_id)
+                 if suppliers is None:
+                     raise MappingNotFoundException(f"Missing supplier mapping for source_id: {row.supplier_source_id}")
+
+                 products = product_mappings.get(row.product_source_id)
+                 if products is None:
+                     raise MappingNotFoundException(f"Missing product mapping for source_id: {row.product_source_id}")
+
+                 expected_combinations = self.get_factor(terminals_length=terminals.length,
+                                                         suppliers_length=suppliers.length,
+                                                         products_length=products.length)
+                 row_combinations = []
+                 for terminal_gravitate_id in terminals.ids:
+                     for supplier_gravitate_id in suppliers.ids:
+                         for product_gravitate_id in products.ids:
+                             new_row = copy.deepcopy(row)
+                             new_row.terminal_source_id = None
+                             new_row.supplier_source_id = None
+                             new_row.product_source_id = None
+                             new_row.terminal_id = terminal_gravitate_id
+                             new_row.supplier_id = supplier_gravitate_id
+                             new_row.product_id = product_gravitate_id
+                             row_combinations.append(new_row)
+                 assert len(row_combinations) == expected_combinations, \
+                     f"Expected {expected_combinations} combinations, got {len(row_combinations)}"
+                 processed_rows.extend(row_combinations)
+             except MappingNotFoundException as mnfe:
+                 error_message = f"Mapping error: {str(mnfe)}"
+                 logger.error(f"Row {idx}: {error_message}")
+                 error_dict[idx] = {
+                     "error_type": "MappingNotFoundException",
+                     "message": str(mnfe),
+                     "row_data": row.model_dump()
+                 }
+                 continue
+             except AssertionError as ae:
+                 error_message = f"Combination count mismatch: {str(ae)}"
+                 logger.error(f"Row {idx}: {error_message}")
+                 error_dict[idx] = {
+                     "error_type": "AssertionError",
+                     "message": str(ae),
+                     "row_data": row.model_dump(),
+                     "expected_count": expected_combinations,
+                     "actual_count": len(row_combinations)
+                 }
+                 continue
+             except KeyError as ke:
+                 error_message = f"KeyError: {str(ke)}"
+                 logger.error(f"Row {idx}: {error_message}")
+                 error_dict[idx] = {
+                     "error_type": "KeyError",
+                     "message": str(ke),
+                     "row_data": row.model_dump()
+                 }
+                 continue
+             except Exception as e:
+                 error_message = f"Unexpected error: {str(e)}"
+                 logger.error(f"Row {idx}: {error_message}")
+                 error_dict[idx] = {
+                     "error_type": type(e).__name__,
+                     "message": str(e),
+                     "row_data": row.model_dump()
+                 }
+                 continue
+         return processed_rows, error_dict
+
+     async def get_mapping_groups(self) -> Groups:
+         mappings = await self.pm.get_mappings()
+         product_mappings = mappings.product_mappings
+         location_mappings = mappings.location_mappings
+         supplier_mappings = mappings.supplier_mappings
+         terminal_groups = self.pm.group_rows(
+             rows=location_mappings,
+             external_id_field=self.config.location_external_keys.external_id_field,
+             gravitate_id_field=self.config.location_external_keys.gravitate_id_field,
+             name_field=self.config.location_external_keys.name_field,
+         )
+         product_groups = self.pm.group_rows(
+             rows=product_mappings,
+             external_id_field=self.config.product_external_keys.external_id_field,
+             gravitate_id_field=self.config.product_external_keys.gravitate_id_field,
+             name_field=self.config.product_external_keys.name_field,
+         )
+         supplier_groups = self.pm.group_rows(
+             rows=supplier_mappings,
+             external_id_field=self.config.supplier_external_keys.external_id_field,
+             gravitate_id_field=self.config.supplier_external_keys.gravitate_id_field,
+             name_field=self.config.supplier_external_keys.name_field,
+         )
+         return Groups(
+             product_groups=product_groups,
+             location_groups=terminal_groups,
+             supplier_groups=supplier_groups,
+         )
+
+ # TestPipeline deleted on May 20 2025. See previous commits for a copy.
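
The fan-out in process_rows is a plain Cartesian product: every mapped terminal id is combined with every supplier id and every product id, and the assert checks the count against the product of the group sizes computed by get_factor. The same expansion in isolation, with illustrative ids rather than library objects:

    from itertools import product

    terminal_ids = ["T1", "T2"]   # mapped Gravitate terminal ids
    supplier_ids = ["S1"]         # mapped supplier ids
    product_ids = ["P1", "P2"]    # mapped product ids

    combos = [
        {"terminal_id": t, "supplier_id": s, "product_id": p}
        for t, s, p in product(terminal_ids, supplier_ids, product_ids)
    ]
    # Mirrors the step's expected_combinations check: 2 * 1 * 2 = 4.
    assert len(combos) == len(terminal_ids) * len(supplier_ids) * len(product_ids)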
bb_integrations_lib/pipelines/steps/processing/send_process_report_step.py
@@ -0,0 +1,47 @@
+ from loguru import logger
+
+ from bb_integrations_lib.gravitate.rita_api import GravitateRitaAPI
+ from bb_integrations_lib.models.pipeline_structs import BBDUploadResult
+ from bb_integrations_lib.models.rita.audit import ProcessReportV2Status, CreateReportV2, UploadProcessReportFile
+ from bb_integrations_lib.protocols.pipelines import Step
+
+
+ class SendProcessReportStep(Step):
+     def __init__(self, rita_client: GravitateRitaAPI, trigger: str, *args, **kwargs):
+         super().__init__(*args, **kwargs)
+         self.rita_client = rita_client
+         self.trigger = trigger
+
+     def describe(self) -> str:
+         if self.pipeline_context.file_config is not None:
+             return "Upload process report to file config " + self.pipeline_context.file_config.client_name
+         else:
+             raise RuntimeError("Attempting to use SendProcessReportStep without a previous step setting the file_config")
+
+     async def execute(self, i: BBDUploadResult) -> BBDUploadResult:
+         fc = self.pipeline_context.file_config
+         if fc is None or fc.config_id is None:
+             raise RuntimeError("Attempting to use SendProcessReportStep but the file config is either not "
+                                "available from context or does not have its config_id set.")
+         logger.info("Uploading process report to RITA...")
+         try:
+             await self.rita_client.create_process_report(CreateReportV2(
+                 trigger=self.trigger,
+                 # If this were an error report, the exception would be caught and reporting would happen in
+                 # finish_pipeline.
+                 status=ProcessReportV2Status.stop,
+                 config_id=fc.config_id,
+                 # Logs are one list item per line, newlines already included. Join into one string.
+                 log=UploadProcessReportFile(file_base_name="log", content="".join(self.pipeline_context.logs)),
+                 included_files=[
+                     UploadProcessReportFile(file_base_name=name, content=content)
+                     for name, content in self.pipeline_context.included_files.items()
+                 ]
+             ))
+             logger.info("Uploaded.")
+         except Exception as e:
+             logger.warning(f"Failed to upload process report: {e}")
+
+         # Reset logs for use by the next branch, if the pipeline has one.
+         self.pipeline_context.logs = []
+         self.pipeline_context.included_files = {}
+
+         return i
bb_integrations_lib/pipelines/steps/processing/sftp_renamer_step.py
@@ -0,0 +1,61 @@
+ import os
+ from asyncio import sleep
+ from datetime import UTC, datetime
+
+ import loguru
+
+ from bb_integrations_lib.gravitate.rita_api import GravitateRitaAPI
+ from bb_integrations_lib.models.pipeline_structs import StopPipeline
+ from bb_integrations_lib.models.rita.config import Config
+ from bb_integrations_lib.protocols.pipelines import Step
+ from bb_integrations_lib.provider.ftp.client import FTPIntegrationClient
+
+
+ class SFTPRenamerStep(Step):
+     def __init__(self, rita_client: GravitateRitaAPI, ftp_client: FTPIntegrationClient, config_id: str,
+                  halt_early: bool = False, *args, **kwargs):
+         super().__init__(*args, **kwargs)
+         self.rita_client = rita_client
+         self.ftp_client = ftp_client
+         self.config_id = config_id
+         self.halt_early = halt_early
+
+         self.config: Config | None = None
+         self.directory: str | None = None
+         self.file_name: str | None = None
+         self.output_name_base: str | None = None
+
+     async def load_config(self):
+         self.config = await self.rita_client.get_config_by_id(self.config_id)
+         self.directory = self.config.config.get("directory")
+         if not self.directory:
+             raise ValueError("The provided config is missing the `directory` field.")
+         self.file_name = self.config.config.get("file_name")
+         if not self.file_name:
+             raise ValueError("The provided config is missing the `file_name` field.")
+         self.output_name_base = self.config.config.get("output_name_base")
+         if not self.output_name_base:
+             raise ValueError("The provided config is missing the `output_name_base` field.")
+
+     def describe(self) -> str:
+         return "Rename files in FTP directory."
+
+     async def execute(self, i: None) -> None:
+         await self.load_config()
+
+         filenames = self.ftp_client.list_files(self.directory)
+         found_any = False
+         for filename in filenames:
+             if self.file_name in filename:
+                 found_any = True
+                 # splitext() keeps the leading dot on the extension, so don't add another one.
+                 file_extension = os.path.splitext(filename)[1]
+                 date = datetime.now(UTC).strftime("%Y%m%d%H%M%S")
+                 new_filename = f"{self.output_name_base}{date}{file_extension}"
+                 old = os.path.join(self.directory, filename)
+                 new = os.path.join(self.directory, new_filename)
+                 self.ftp_client.rename_file(old, new)
+                 loguru.logger.info(f"File renamed: {old} -> {new}")
+                 await sleep(1)
+         if not found_any and self.halt_early:
+             raise StopPipeline("No files to rename.")
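
load_config expects the RITA config document's `config` payload to carry three keys. A sketch of a matching payload (the values are examples only, not a shipped configuration):

    config_payload = {
        "directory": "/outbound/prices",  # SFTP directory to scan
        "file_name": "PRICES",            # substring matched against each filename
        "output_name_base": "prices_",    # prefix for the renamed file
    }
    # With this payload, "/outbound/prices/PRICES.csv" would be renamed to
    # "/outbound/prices/prices_20250101120000.csv" (UTC timestamp).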
bb_integrations_lib/pipelines/steps/processing/tank_reading_touchup_steps.py
@@ -0,0 +1,75 @@
+ from datetime import datetime, timedelta, UTC
+
+ import pandas as pd
+ from loguru import logger
+
+ from bb_integrations_lib.gravitate.sd_api import GravitateSDAPI
+ from bb_integrations_lib.protocols.pipelines import Step
+ from bb_integrations_lib.shared.model import GetOrderBolsAndDropsRequest
+ from bb_integrations_lib.util.config.manager import GlobalConfigManager
+
+
+ class TRTouchUpStep(Step):
+     """Superclass for tank reading touchup steps."""
+
+     def __init__(self, step_configuration: dict) -> None:
+         super().__init__(step_configuration)
+         self.mode = step_configuration.get("mode", "production")
+
+
+ class NouriaDayDropsTRTouchUpStep(TRTouchUpStep):
+     """
+     Touch up a standardized tank readings report to add an "Order Number + Drop Index" column, which Nouria uses
+     to get tank levels just before they dropped product.
+
+     Note that the tenant/environment is hard coded to Nouria.
+     """
+
+     def __init__(self, step_configuration: dict) -> None:
+         super().__init__(step_configuration)
+
+         self.tenant_name = "Nouria"
+         gcm = GlobalConfigManager().get_environment(self.tenant_name)
+         self.secrets = gcm.prod if self.mode == "production" else gcm.test
+         self.sd_client = GravitateSDAPI(
+             base_url=self.secrets.sd.base_url,
+             client_id=self.secrets.sd.client_id,
+             client_secret=self.secrets.sd.client_secret,
+         )
+
+     def describe(self) -> str:
+         return "Touch up Nouria tank readings for the day drops report"
+
+     async def execute(self, i: pd.DataFrame) -> pd.DataFrame:
+         df = i.copy()
+         when = datetime.now(UTC)
+         recent_drops_resp = await self.sd_client.get_bols_and_drops(GetOrderBolsAndDropsRequest(
+             order_date_start=when - timedelta(days=1),
+             order_date_end=when
+         ))
+         recent_drops = recent_drops_resp.json()
+         dfg = df.groupby(["Store Number", "Tank Id"])
+         out_series = []
+         for bol in recent_drops:
+             for drop_idx, drop in enumerate(bol["drops"]):
+                 site_no = str(drop["location"])
+                 tank_id = str(drop["tank_id"])
+                 try:
+                     # Get the readings rows for this drop's specific site/tank
+                     group = dfg.get_group((site_no, tank_id))
+                     # Keep reads that are before the before_stick timestamp, then get the index of the reading
+                     # row with the most recent (closest to before_stick_time) timestamp.
+                     idx = group[group["Read Time"] < drop["before_stick_time"]]["Read Time"].idxmax()
+                     # idxmax() returns an index label, so look it up with .loc; copy to avoid pandas grumbling.
+                     row = df.loc[idx].copy()
+                     row["Order Number + Drop Index"] = f"{bol['order_number']}-{drop_idx + 1}"
+                     out_series.append(row)
+                 except KeyError:
+                     logger.warning(f"{site_no}, tank {tank_id} in drops but not tank readings, skipping record")
+                 except ValueError as e:
+                     logger.warning(f"{site_no}, tank {tank_id} could not be processed: {e}")
+         # If there are no data rows, create an empty df with the same columns the actual data would have
+         # (since there are no series items to infer headers from, the whole file would otherwise be empty).
+         if len(out_series) == 0:
+             empty_df = pd.DataFrame(data=None, columns=list(df.columns) + ["Order Number + Drop Index"])
+             return empty_df
+         # Otherwise reconstitute the relevant rows (with the newly added order number/drop index column).
+         return pd.DataFrame.from_records(out_series).sort_values(["Store Number", "Tank Id"])
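
The per-drop lookup above is a groupby followed by idxmax over a filtered timestamp column. The same pattern in isolation, using toy data rather than Nouria's report schema:

    import pandas as pd

    df = pd.DataFrame({
        "Store Number": ["1", "1", "1"],
        "Tank Id": ["A", "A", "A"],
        "Read Time": pd.to_datetime(["2025-01-01 00:00", "2025-01-01 06:00", "2025-01-01 12:00"]),
    })
    before_stick_time = pd.Timestamp("2025-01-01 11:00")

    group = df.groupby(["Store Number", "Tank Id"]).get_group(("1", "A"))
    # Label of the most recent reading strictly before the drop's before_stick_time.
    idx = group[group["Read Time"] < before_stick_time]["Read Time"].idxmax()
    print(df.loc[idx])  # the 06:00 reading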
bb_integrations_lib/pipelines/steps/processing/upload_supplier_invoice_step.py
@@ -0,0 +1,16 @@
+ from bb_integrations_lib.gravitate.sd_api import GravitateSDAPI
+ from bb_integrations_lib.protocols.pipelines import Step
+ from bb_integrations_lib.shared.model import SDSupplierInvoiceCreateRequest
+
+
+ class UploadSupplierInvoiceStep(Step):
+     def __init__(self, sd_client: GravitateSDAPI, *args, **kwargs):
+         super().__init__(*args, **kwargs)
+         self.sd_client = sd_client
+
+     def describe(self) -> str:
+         return "Upload a supplier invoice to S&D"
+
+     async def execute(self, i: SDSupplierInvoiceCreateRequest) -> None:
+         res = await self.sd_client.upload_supplier_invoice(i)
+         res.raise_for_status()
bb_integrations_lib/pipelines/steps/send_attached_in_rita_email_step.py
@@ -0,0 +1,44 @@
+ from io import BytesIO
+
+ from bb_integrations_lib.gravitate.rita_api import GravitateRitaAPI
+ from bb_integrations_lib.models.rita.email import EmailData, EmailAttachment
+ from bb_integrations_lib.pipelines.steps.send_rita_email_step import SendRitaEmailStep
+ from bb_integrations_lib.shared.model import RawData
+
+
+ class SendAttachedInRitaEmailStep(SendRitaEmailStep):
+     def __init__(self, rita_client: GravitateRitaAPI, to: str | list[str], html_content: str, subject: str,
+                  timeout: float = 10.0, *args, **kwargs):
+         """
+         Send one or more RawData objects as email attachments via RITA.
+
+         :param rita_client: Instantiated RITA API client using an API key with the email.send scope.
+         :param to: Email address(es) to send the email to.
+         :param html_content: HTML content (body) of the email.
+         :param subject: Subject of the email.
+         :param timeout: The maximum amount of time allowed to send the email. Large emails may take longer to
+             send than the default.
+         """
+         super().__init__(rita_client=rita_client, timeout=timeout, *args, **kwargs)
+         self.to = to
+         self.html_content = html_content
+         self.subject = subject
+
+     def describe(self):
+         return "Send email via RITA with RawData(s) from step input attached"
+
+     async def execute(self, i: RawData | list[RawData]):
+         if isinstance(i, RawData):
+             i = [i]
+         ed = EmailData(
+             to=self.to,
+             html_content=self.html_content,
+             subject=self.subject,
+             attachments=[
+                 EmailAttachment(
+                     file_name=rd.file_name,
+                     file_data=rd.data.getvalue() if isinstance(rd.data, BytesIO) else rd.data,
+                 ) for rd in i
+             ]
+         )
+         await super().execute(ed)
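
A minimal usage sketch. The pre-built rita_client, the addresses, and the RawData constructor keywords below are assumptions for illustration (the field names mirror the attributes read in execute() above), not confirmed signatures:

    import asyncio

    step = SendAttachedInRitaEmailStep(
        rita_client=rita_client,  # an instantiated GravitateRitaAPI with the email.send scope
        to=["ops@example.com"],
        subject="Daily price file",
        html_content="<p>Attached is today's price file.</p>",
    )
    asyncio.run(step.execute(RawData(file_name="prices.csv", data=b"site,product,price\n")))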
bb_integrations_lib/pipelines/steps/send_rita_email_step.py
@@ -0,0 +1,34 @@
+ from bb_integrations_lib.gravitate.rita_api import GravitateRitaAPI
+ from bb_integrations_lib.models.rita.email import EmailData
+ from bb_integrations_lib.protocols.pipelines import Step
+
+
+ class SendRitaEmailStep(Step):
+     def __init__(self, rita_client: GravitateRitaAPI, timeout: float = 10.0, raise_on_error: bool = True,
+                  email_data_override: EmailData | None = None, *args, **kwargs):
+         """
+         Instantiate a pipeline step that sends an email via RITA.
+
+         :param rita_client: Instantiated RITA API client using an API key with the email.send scope.
+         :param timeout: The maximum amount of time allowed to send the email. Large emails may take longer to
+             send than the default.
+         :param raise_on_error: Whether to raise an exception if the email send HTTP response indicates an error.
+         :param email_data_override: Explicitly specified EmailData object to use instead of the step input. This
+             allows specifying a static email for either testing or notifications.
+         """
+         super().__init__(*args, **kwargs)
+         self.rita_client = rita_client
+         self.timeout = timeout
+         self.raise_on_error = raise_on_error
+         self.ed_override = email_data_override
+
+     def describe(self) -> str:
+         return "Send email via RITA"
+
+     async def execute(self, i: EmailData):
+         if self.ed_override:
+             resp = await self.rita_client.send_email(self.ed_override, self.timeout)
+         else:
+             resp = await self.rita_client.send_email(i, self.timeout)
+         if self.raise_on_error:
+             resp.raise_for_status()
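
A sketch of a static notification using email_data_override. The EmailData field names mirror those used in SendAttachedInRitaEmailStep above; treating attachments as optional and the pre-built rita_client are assumptions:

    import asyncio

    step = SendRitaEmailStep(
        rita_client=rita_client,  # an instantiated GravitateRitaAPI with the email.send scope
        email_data_override=EmailData(
            to=["oncall@example.com"],
            subject="Pipeline finished",
            html_content="<p>Run complete.</p>",
        ),
    )
    asyncio.run(step.execute(None))  # step input is ignored when the override is set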
bb_integrations_lib/pipelines/steps/sleep_step.py
@@ -0,0 +1,24 @@
+ import asyncio
+
+ from loguru import logger
+
+ from bb_integrations_lib.protocols.pipelines import Step
+
+
+ class SleepStep(Step):
+     def __init__(self, duration: float, *args, **kwargs):
+         """
+         Equivalent to ``asyncio.sleep(duration)``.
+
+         :param duration: The number of seconds to sleep.
+         """
+         super().__init__(*args, **kwargs)
+
+         self.duration = duration
+
+     def describe(self) -> str:
+         return f"Wait {self.duration} seconds"
+
+     async def execute(self, data: None) -> None:
+         logger.debug(f"Sleeping for {self.duration} seconds...")
+         await asyncio.sleep(self.duration)