bb-integrations-library 3.0.11 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (217)
  1. bb_integrations_lib/__init__.py +0 -0
  2. bb_integrations_lib/converters/__init__.py +0 -0
  3. bb_integrations_lib/gravitate/__init__.py +0 -0
  4. bb_integrations_lib/gravitate/base_api.py +20 -0
  5. bb_integrations_lib/gravitate/model.py +29 -0
  6. bb_integrations_lib/gravitate/pe_api.py +122 -0
  7. bb_integrations_lib/gravitate/rita_api.py +552 -0
  8. bb_integrations_lib/gravitate/sd_api.py +572 -0
  9. bb_integrations_lib/gravitate/testing/TTE/sd/models.py +1398 -0
  10. bb_integrations_lib/gravitate/testing/TTE/sd/tests/test_models.py +2987 -0
  11. bb_integrations_lib/gravitate/testing/__init__.py +0 -0
  12. bb_integrations_lib/gravitate/testing/builder.py +55 -0
  13. bb_integrations_lib/gravitate/testing/openapi.py +70 -0
  14. bb_integrations_lib/gravitate/testing/util.py +274 -0
  15. bb_integrations_lib/mappers/__init__.py +0 -0
  16. bb_integrations_lib/mappers/prices/__init__.py +0 -0
  17. bb_integrations_lib/mappers/prices/model.py +106 -0
  18. bb_integrations_lib/mappers/prices/price_mapper.py +127 -0
  19. bb_integrations_lib/mappers/prices/protocol.py +20 -0
  20. bb_integrations_lib/mappers/prices/util.py +61 -0
  21. bb_integrations_lib/mappers/rita_mapper.py +523 -0
  22. bb_integrations_lib/models/__init__.py +0 -0
  23. bb_integrations_lib/models/dtn_supplier_invoice.py +487 -0
  24. bb_integrations_lib/models/enums.py +28 -0
  25. bb_integrations_lib/models/pipeline_structs.py +76 -0
  26. bb_integrations_lib/models/probe/probe_event.py +20 -0
  27. bb_integrations_lib/models/probe/request_data.py +431 -0
  28. bb_integrations_lib/models/probe/resume_token.py +7 -0
  29. bb_integrations_lib/models/rita/audit.py +113 -0
  30. bb_integrations_lib/models/rita/auth.py +30 -0
  31. bb_integrations_lib/models/rita/bucket.py +17 -0
  32. bb_integrations_lib/models/rita/config.py +188 -0
  33. bb_integrations_lib/models/rita/constants.py +19 -0
  34. bb_integrations_lib/models/rita/crossroads_entities.py +293 -0
  35. bb_integrations_lib/models/rita/crossroads_mapping.py +428 -0
  36. bb_integrations_lib/models/rita/crossroads_monitoring.py +78 -0
  37. bb_integrations_lib/models/rita/crossroads_network.py +41 -0
  38. bb_integrations_lib/models/rita/crossroads_rules.py +80 -0
  39. bb_integrations_lib/models/rita/email.py +39 -0
  40. bb_integrations_lib/models/rita/issue.py +63 -0
  41. bb_integrations_lib/models/rita/mapping.py +227 -0
  42. bb_integrations_lib/models/rita/probe.py +58 -0
  43. bb_integrations_lib/models/rita/reference_data.py +110 -0
  44. bb_integrations_lib/models/rita/source_system.py +9 -0
  45. bb_integrations_lib/models/rita/workers.py +76 -0
  46. bb_integrations_lib/models/sd/bols_and_drops.py +241 -0
  47. bb_integrations_lib/models/sd/get_order.py +301 -0
  48. bb_integrations_lib/models/sd/orders.py +18 -0
  49. bb_integrations_lib/models/sd_api.py +115 -0
  50. bb_integrations_lib/pipelines/__init__.py +0 -0
  51. bb_integrations_lib/pipelines/parsers/__init__.py +0 -0
  52. bb_integrations_lib/pipelines/parsers/distribution_report/__init__.py +0 -0
  53. bb_integrations_lib/pipelines/parsers/distribution_report/order_by_site_product_parser.py +50 -0
  54. bb_integrations_lib/pipelines/parsers/distribution_report/tank_configs_parser.py +47 -0
  55. bb_integrations_lib/pipelines/parsers/dtn/__init__.py +0 -0
  56. bb_integrations_lib/pipelines/parsers/dtn/dtn_price_parser.py +102 -0
  57. bb_integrations_lib/pipelines/parsers/dtn/model.py +79 -0
  58. bb_integrations_lib/pipelines/parsers/price_engine/__init__.py +0 -0
  59. bb_integrations_lib/pipelines/parsers/price_engine/parse_accessorials_prices_parser.py +67 -0
  60. bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/__init__.py +0 -0
  61. bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_merge_parser.py +111 -0
  62. bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_sync_parser.py +107 -0
  63. bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/shared.py +81 -0
  64. bb_integrations_lib/pipelines/parsers/tank_reading_parser.py +155 -0
  65. bb_integrations_lib/pipelines/parsers/tank_sales_parser.py +144 -0
  66. bb_integrations_lib/pipelines/shared/__init__.py +0 -0
  67. bb_integrations_lib/pipelines/shared/allocation_matching.py +227 -0
  68. bb_integrations_lib/pipelines/shared/bol_allocation.py +2793 -0
  69. bb_integrations_lib/pipelines/steps/__init__.py +0 -0
  70. bb_integrations_lib/pipelines/steps/create_accessorials_step.py +80 -0
  71. bb_integrations_lib/pipelines/steps/distribution_report/__init__.py +0 -0
  72. bb_integrations_lib/pipelines/steps/distribution_report/distribution_report_datafram_to_raw_data.py +33 -0
  73. bb_integrations_lib/pipelines/steps/distribution_report/get_model_history_step.py +50 -0
  74. bb_integrations_lib/pipelines/steps/distribution_report/get_order_by_site_product_step.py +62 -0
  75. bb_integrations_lib/pipelines/steps/distribution_report/get_tank_configs_step.py +40 -0
  76. bb_integrations_lib/pipelines/steps/distribution_report/join_distribution_order_dos_step.py +85 -0
  77. bb_integrations_lib/pipelines/steps/distribution_report/upload_distribution_report_datafram_to_big_query.py +47 -0
  78. bb_integrations_lib/pipelines/steps/echo_step.py +14 -0
  79. bb_integrations_lib/pipelines/steps/export_dataframe_to_rawdata_step.py +28 -0
  80. bb_integrations_lib/pipelines/steps/exporting/__init__.py +0 -0
  81. bb_integrations_lib/pipelines/steps/exporting/bbd_export_payroll_file_step.py +107 -0
  82. bb_integrations_lib/pipelines/steps/exporting/bbd_export_readings_step.py +236 -0
  83. bb_integrations_lib/pipelines/steps/exporting/cargas_wholesale_bundle_upload_step.py +33 -0
  84. bb_integrations_lib/pipelines/steps/exporting/dataframe_flat_file_export.py +29 -0
  85. bb_integrations_lib/pipelines/steps/exporting/gcs_bucket_export_file_step.py +34 -0
  86. bb_integrations_lib/pipelines/steps/exporting/keyvu_export_step.py +356 -0
  87. bb_integrations_lib/pipelines/steps/exporting/pe_price_export_step.py +238 -0
  88. bb_integrations_lib/pipelines/steps/exporting/platform_science_order_sync_step.py +500 -0
  89. bb_integrations_lib/pipelines/steps/exporting/save_rawdata_to_disk.py +15 -0
  90. bb_integrations_lib/pipelines/steps/exporting/sftp_export_file_step.py +60 -0
  91. bb_integrations_lib/pipelines/steps/exporting/sftp_export_many_files_step.py +23 -0
  92. bb_integrations_lib/pipelines/steps/exporting/update_exported_orders_table_step.py +64 -0
  93. bb_integrations_lib/pipelines/steps/filter_step.py +22 -0
  94. bb_integrations_lib/pipelines/steps/get_latest_sync_date.py +34 -0
  95. bb_integrations_lib/pipelines/steps/importing/bbd_import_payroll_step.py +30 -0
  96. bb_integrations_lib/pipelines/steps/importing/get_order_numbers_to_export_step.py +138 -0
  97. bb_integrations_lib/pipelines/steps/importing/load_file_to_dataframe_step.py +46 -0
  98. bb_integrations_lib/pipelines/steps/importing/load_imap_attachment_step.py +172 -0
  99. bb_integrations_lib/pipelines/steps/importing/pe_bulk_sync_price_structure_step.py +68 -0
  100. bb_integrations_lib/pipelines/steps/importing/pe_price_merge_step.py +86 -0
  101. bb_integrations_lib/pipelines/steps/importing/sftp_file_config_step.py +124 -0
  102. bb_integrations_lib/pipelines/steps/importing/test_exact_file_match.py +57 -0
  103. bb_integrations_lib/pipelines/steps/null_step.py +15 -0
  104. bb_integrations_lib/pipelines/steps/pe_integration_job_step.py +32 -0
  105. bb_integrations_lib/pipelines/steps/processing/__init__.py +0 -0
  106. bb_integrations_lib/pipelines/steps/processing/archive_gcs_step.py +76 -0
  107. bb_integrations_lib/pipelines/steps/processing/archive_sftp_step.py +48 -0
  108. bb_integrations_lib/pipelines/steps/processing/bbd_format_tank_readings_step.py +492 -0
  109. bb_integrations_lib/pipelines/steps/processing/bbd_upload_prices_step.py +54 -0
  110. bb_integrations_lib/pipelines/steps/processing/bbd_upload_tank_sales_step.py +124 -0
  111. bb_integrations_lib/pipelines/steps/processing/bbd_upload_tankreading_step.py +80 -0
  112. bb_integrations_lib/pipelines/steps/processing/convert_bbd_order_to_cargas_step.py +226 -0
  113. bb_integrations_lib/pipelines/steps/processing/delete_sftp_step.py +33 -0
  114. bb_integrations_lib/pipelines/steps/processing/dtn/__init__.py +2 -0
  115. bb_integrations_lib/pipelines/steps/processing/dtn/convert_dtn_invoice_to_sd_model.py +145 -0
  116. bb_integrations_lib/pipelines/steps/processing/dtn/parse_dtn_invoice_step.py +38 -0
  117. bb_integrations_lib/pipelines/steps/processing/file_config_parser_step.py +720 -0
  118. bb_integrations_lib/pipelines/steps/processing/file_config_parser_step_v2.py +418 -0
  119. bb_integrations_lib/pipelines/steps/processing/get_sd_price_price_request.py +105 -0
  120. bb_integrations_lib/pipelines/steps/processing/keyvu_upload_deliveryplan_step.py +39 -0
  121. bb_integrations_lib/pipelines/steps/processing/mark_orders_exported_in_bbd_step.py +185 -0
  122. bb_integrations_lib/pipelines/steps/processing/pe_price_rows_processing_step.py +174 -0
  123. bb_integrations_lib/pipelines/steps/processing/send_process_report_step.py +47 -0
  124. bb_integrations_lib/pipelines/steps/processing/sftp_renamer_step.py +61 -0
  125. bb_integrations_lib/pipelines/steps/processing/tank_reading_touchup_steps.py +75 -0
  126. bb_integrations_lib/pipelines/steps/processing/upload_supplier_invoice_step.py +16 -0
  127. bb_integrations_lib/pipelines/steps/send_attached_in_rita_email_step.py +44 -0
  128. bb_integrations_lib/pipelines/steps/send_rita_email_step.py +34 -0
  129. bb_integrations_lib/pipelines/steps/sleep_step.py +24 -0
  130. bb_integrations_lib/pipelines/wrappers/__init__.py +0 -0
  131. bb_integrations_lib/pipelines/wrappers/accessorials_transformation.py +104 -0
  132. bb_integrations_lib/pipelines/wrappers/distribution_report.py +191 -0
  133. bb_integrations_lib/pipelines/wrappers/export_tank_readings.py +237 -0
  134. bb_integrations_lib/pipelines/wrappers/import_tank_readings.py +192 -0
  135. bb_integrations_lib/pipelines/wrappers/wrapper.py +81 -0
  136. bb_integrations_lib/protocols/__init__.py +0 -0
  137. bb_integrations_lib/protocols/flat_file.py +210 -0
  138. bb_integrations_lib/protocols/gravitate_client.py +104 -0
  139. bb_integrations_lib/protocols/pipelines.py +697 -0
  140. bb_integrations_lib/provider/__init__.py +0 -0
  141. bb_integrations_lib/provider/api/__init__.py +0 -0
  142. bb_integrations_lib/provider/api/cargas/__init__.py +0 -0
  143. bb_integrations_lib/provider/api/cargas/client.py +43 -0
  144. bb_integrations_lib/provider/api/cargas/model.py +49 -0
  145. bb_integrations_lib/provider/api/cargas/protocol.py +23 -0
  146. bb_integrations_lib/provider/api/dtn/__init__.py +0 -0
  147. bb_integrations_lib/provider/api/dtn/client.py +128 -0
  148. bb_integrations_lib/provider/api/dtn/protocol.py +9 -0
  149. bb_integrations_lib/provider/api/keyvu/__init__.py +0 -0
  150. bb_integrations_lib/provider/api/keyvu/client.py +30 -0
  151. bb_integrations_lib/provider/api/keyvu/model.py +149 -0
  152. bb_integrations_lib/provider/api/macropoint/__init__.py +0 -0
  153. bb_integrations_lib/provider/api/macropoint/client.py +28 -0
  154. bb_integrations_lib/provider/api/macropoint/model.py +40 -0
  155. bb_integrations_lib/provider/api/pc_miler/__init__.py +0 -0
  156. bb_integrations_lib/provider/api/pc_miler/client.py +130 -0
  157. bb_integrations_lib/provider/api/pc_miler/model.py +6 -0
  158. bb_integrations_lib/provider/api/pc_miler/web_services_apis.py +131 -0
  159. bb_integrations_lib/provider/api/platform_science/__init__.py +0 -0
  160. bb_integrations_lib/provider/api/platform_science/client.py +147 -0
  161. bb_integrations_lib/provider/api/platform_science/model.py +82 -0
  162. bb_integrations_lib/provider/api/quicktrip/__init__.py +0 -0
  163. bb_integrations_lib/provider/api/quicktrip/client.py +52 -0
  164. bb_integrations_lib/provider/api/telapoint/__init__.py +0 -0
  165. bb_integrations_lib/provider/api/telapoint/client.py +68 -0
  166. bb_integrations_lib/provider/api/telapoint/model.py +178 -0
  167. bb_integrations_lib/provider/api/warren_rogers/__init__.py +0 -0
  168. bb_integrations_lib/provider/api/warren_rogers/client.py +207 -0
  169. bb_integrations_lib/provider/aws/__init__.py +0 -0
  170. bb_integrations_lib/provider/aws/s3/__init__.py +0 -0
  171. bb_integrations_lib/provider/aws/s3/client.py +126 -0
  172. bb_integrations_lib/provider/ftp/__init__.py +0 -0
  173. bb_integrations_lib/provider/ftp/client.py +140 -0
  174. bb_integrations_lib/provider/ftp/interface.py +273 -0
  175. bb_integrations_lib/provider/ftp/model.py +76 -0
  176. bb_integrations_lib/provider/imap/__init__.py +0 -0
  177. bb_integrations_lib/provider/imap/client.py +228 -0
  178. bb_integrations_lib/provider/imap/model.py +3 -0
  179. bb_integrations_lib/provider/sqlserver/__init__.py +0 -0
  180. bb_integrations_lib/provider/sqlserver/client.py +106 -0
  181. bb_integrations_lib/secrets/__init__.py +4 -0
  182. bb_integrations_lib/secrets/adapters.py +98 -0
  183. bb_integrations_lib/secrets/credential_models.py +222 -0
  184. bb_integrations_lib/secrets/factory.py +85 -0
  185. bb_integrations_lib/secrets/providers.py +160 -0
  186. bb_integrations_lib/shared/__init__.py +0 -0
  187. bb_integrations_lib/shared/exceptions.py +25 -0
  188. bb_integrations_lib/shared/model.py +1039 -0
  189. bb_integrations_lib/shared/shared_enums.py +510 -0
  190. bb_integrations_lib/storage/README.md +236 -0
  191. bb_integrations_lib/storage/__init__.py +0 -0
  192. bb_integrations_lib/storage/aws/__init__.py +0 -0
  193. bb_integrations_lib/storage/aws/s3.py +8 -0
  194. bb_integrations_lib/storage/defaults.py +72 -0
  195. bb_integrations_lib/storage/gcs/__init__.py +0 -0
  196. bb_integrations_lib/storage/gcs/client.py +8 -0
  197. bb_integrations_lib/storage/gcsmanager/__init__.py +0 -0
  198. bb_integrations_lib/storage/gcsmanager/client.py +8 -0
  199. bb_integrations_lib/storage/setup.py +29 -0
  200. bb_integrations_lib/util/__init__.py +0 -0
  201. bb_integrations_lib/util/cache/__init__.py +0 -0
  202. bb_integrations_lib/util/cache/custom_ttl_cache.py +75 -0
  203. bb_integrations_lib/util/cache/protocol.py +9 -0
  204. bb_integrations_lib/util/config/__init__.py +0 -0
  205. bb_integrations_lib/util/config/manager.py +391 -0
  206. bb_integrations_lib/util/config/model.py +41 -0
  207. bb_integrations_lib/util/exception_logger/__init__.py +0 -0
  208. bb_integrations_lib/util/exception_logger/exception_logger.py +146 -0
  209. bb_integrations_lib/util/exception_logger/test.py +114 -0
  210. bb_integrations_lib/util/utils.py +364 -0
  211. bb_integrations_lib/workers/__init__.py +0 -0
  212. bb_integrations_lib/workers/groups.py +13 -0
  213. bb_integrations_lib/workers/rpc_worker.py +50 -0
  214. bb_integrations_lib/workers/topics.py +20 -0
  215. bb_integrations_library-3.0.11.dist-info/METADATA +59 -0
  216. bb_integrations_library-3.0.11.dist-info/RECORD +217 -0
  217. bb_integrations_library-3.0.11.dist-info/WHEEL +4 -0
bb_integrations_lib/pipelines/steps/get_latest_sync_date.py
@@ -0,0 +1,34 @@
+ from datetime import datetime, UTC
+ from typing import Dict
+
+ import pytz
+ from bb_integrations_lib.gravitate.rita_api import GravitateRitaAPI
+ from bb_integrations_lib.models.rita.config import MaxSync
+ from bb_integrations_lib.protocols.pipelines import Step
+ from dateutil.parser import parse
+
+
+ class GetLatestSyncDate(Step):
+     def __init__(self, step_configuration: Dict[str, str]):
+         super().__init__(step_configuration)
+         self.tenant_name = step_configuration['tenant_name']
+         self.mode = step_configuration.get('mode', 'production')
+         self.rita_client: GravitateRitaAPI = self.config_manager.environment_from_name(self.tenant_name,
+                                                                                         self.mode).rita.api_client
+         self.config_id = step_configuration.get("config_id")
+         self.test_override = step_configuration.get("test_override", None)
+
+     def describe(self) -> str:
+         return "Get Latest Sync Date"
+
+     async def execute(self, i: str) -> datetime:
+         return await self.get_last_sync_date()
+
+     async def get_last_sync_date(self) -> datetime:
+         if self.test_override:
+             return parse(self.test_override).replace(tzinfo=pytz.UTC)
+         if not self.config_id or self.config_id is None:
+             return datetime.now(UTC)
+         max_sync: MaxSync = await self.rita_client.get_config_max_sync(config_id=self.config_id)
+         dt = max_sync.max_sync_date.replace(tzinfo=pytz.UTC)
+         return dt
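A minimal usage sketch for the GetLatestSyncDate step above, assuming the step's configuration manager can resolve the named tenant; the tenant name and config id below are placeholders, not values from the package.

import asyncio
from bb_integrations_lib.pipelines.steps.get_latest_sync_date import GetLatestSyncDate

async def main() -> None:
    step = GetLatestSyncDate({
        "tenant_name": "example-tenant",           # placeholder tenant name
        "mode": "production",
        "config_id": "example-config-id",          # placeholder RITA config id
        # "test_override": "2024-01-01T00:00:00",  # optional: skip RITA and return this date as UTC
    })
    print(await step.execute(""))                  # -> timezone-aware datetime of the last sync

asyncio.run(main())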
bb_integrations_lib/pipelines/steps/importing/bbd_import_payroll_step.py
@@ -0,0 +1,30 @@
+ from typing import Dict, Any
+
+ import loguru
+ from httpx import HTTPStatusError
+ from bb_integrations_lib.protocols.pipelines import Step
+ from bb_integrations_lib.shared.model import FileReference, FileType
+ from bb_integrations_lib.gravitate.sd_api import GravitateSDAPI
+
+
+ class BBDImportPayrollStep(Step):
+     def __init__(self, sd_client: GravitateSDAPI, output_file_path: str, bbd_date_argument: str, *args, **kwargs):
+         super().__init__(*args, **kwargs)
+         self.sd_client = sd_client
+         self.temp_file_path = output_file_path
+         self.bbd_date_argument = bbd_date_argument
+
+     def describe(self) -> str:
+         return "Import Payroll from BBD"
+
+     async def execute(self, _: Any) -> FileReference:
+         try:
+             payroll_resp = await self.sd_client.payroll_export(self.bbd_date_argument)
+             # Download the file from the response
+             if payroll_resp.status_code == 200:
+                 with open(self.temp_file_path, "wb") as f:
+                     f.write(payroll_resp.content)
+
+             return FileReference(self.temp_file_path, FileType.excel)
+         except HTTPStatusError as e:
+             loguru.logger.error(e.response.content)
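A hedged wiring sketch for BBDImportPayrollStep; the GravitateSDAPI constructor is not shown in this diff, so an already-configured client is assumed, and the output path and date argument are placeholders.

from bb_integrations_lib.pipelines.steps.importing.bbd_import_payroll_step import BBDImportPayrollStep

async def run_payroll_import(sd_client) -> None:
    # sd_client: an already-configured GravitateSDAPI instance (construction not shown in this diff)
    step = BBDImportPayrollStep(
        sd_client=sd_client,
        output_file_path="/tmp/payroll.xlsx",  # placeholder path for the downloaded export
        bbd_date_argument="2024-01-01",        # placeholder date argument forwarded to payroll_export
    )
    file_ref = await step.execute(None)        # FileReference pointing at the written Excel file
    print(file_ref)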
bb_integrations_lib/pipelines/steps/importing/get_order_numbers_to_export_step.py
@@ -0,0 +1,138 @@
+ import enum
+ from datetime import datetime, UTC, timedelta
+ from typing import Any, List
+
+ import pandas_gbq
+ from google.oauth2 import service_account
+ from loguru import logger
+ import pandas as pd
+ from pymongo import MongoClient
+ from pymongo.synchronous.database import Database
+
+ from bb_integrations_lib.models.pipeline_structs import StopPipeline, StopBranch
+ from bb_integrations_lib.protocols.pipelines import Step
+ from bb_integrations_lib.util.utils import init_db
+
+ class OrderType(enum.Enum):
+     ANY = enum.auto()
+     NOT_BACKHAUL = enum.auto()
+     BACKHAUL_ONLY = enum.auto()
+
+     def to_mongo_query(self):
+         match self:
+             case OrderType.ANY:
+                 return {}
+             case OrderType.NOT_BACKHAUL:
+                 return {"type": {"$ne": "backhaul"}}
+             case OrderType.BACKHAUL_ONLY:
+                 return {"type": "backhaul"}
+
+     def __str__(self) -> str:
+         return {
+             OrderType.ANY: "any",
+             OrderType.NOT_BACKHAUL: "not backhaul",
+             OrderType.BACKHAUL_ONLY: "backhaul only",
+         }[self]
+
+ class GetOrderNumbersToExportStep(Step):
+     def __init__(self, mongo_database: Database, order_type_filter: OrderType, exported_order_table_name: str,
+                  project_id: str, gcp_credentials_file: str, testing_date_min: datetime | None = None,
+                  testing_date_max: datetime | None = None, lookback_days: int = 60, use_old_change_query: bool = False,
+                  check_for_updated_orders: bool = True, *args, **kwargs):
+         super().__init__(*args, **kwargs)
+         self.mongo_database = mongo_database
+         self.order_type_filter = order_type_filter
+         self.exported_order_table_name = exported_order_table_name
+         self.project_id = project_id
+         self.gcp_credentials_file = gcp_credentials_file
+         self.testing_date_min = testing_date_min
+         self.testing_date_max = testing_date_max
+         self.lookback_days = lookback_days
+         self.use_old_change_query = use_old_change_query
+         self.check_for_updated_orders = check_for_updated_orders
+         if not self.gcp_credentials_file.endswith(".json"):
+             self.gcp_credentials_file += ".json"
+
+     def describe(self) -> str:
+         return "Determine which order numbers to export."
+
+     async def execute(self, i: Any) -> List[int]:
+         collection = self.mongo_database["order_v2"]
+         if not self.testing_date_min and not self.testing_date_max:
+             lookback_date = datetime.now(UTC) - timedelta(days=self.lookback_days)
+             date_query = {'$or': [
+                 {'updated_on': {'$gte': lookback_date}},
+                 {'movement_updated': {'$gte': lookback_date}}
+             ]}
+             logger.debug(f"Looking for orders in the previous {self.lookback_days} days.")
+         elif self.testing_date_min and self.testing_date_max:
+             date_query = {'$or': [
+                 {'updated_on': {'$gte': self.testing_date_min, '$lte': self.testing_date_max}},
+                 {'movement_updated': {'$gte': self.testing_date_min, '$lte': self.testing_date_max}}
+             ]}
+             logger.debug(f"Looking for orders between {self.testing_date_min} and {self.testing_date_max}")
+         else:
+             raise RuntimeError("testing_date_max and testing_date_min must be provided together, or both not provided.")
+
+         if self.use_old_change_query:
+             change_query = {"change_date": "$updated_on"}
+         else:
+             change_query = {"change_date": {"$max": ["$updated_on", "$movement_updated"]}}
+
+         # Over time the number of orders will grow. To keep step performance constant we're only going to look at
+         # orders updated within the lookback window (60 days by default).
+         assert isinstance(self.order_type_filter, OrderType)
+         logger.debug(f"Searching for orders of type: {str(self.order_type_filter)}")
+         orders = list(collection.find({**date_query,
+                                        **self.order_type_filter.to_mongo_query(),
+                                        "state": "complete"},
+                                       {
+                                           "_id": 0,
+                                           "number": 1,
+                                           **change_query,
+                                       }))
+         logger.debug(f"There are {len(orders)} orders updated in the last {self.lookback_days} days.")
+         if len(orders) == 0:
+             raise StopBranch()
+         # We need to look up the list of exported orders from GBQ
+         credentials = service_account.Credentials.from_service_account_file(self.gcp_credentials_file)
+         sql = (f"select order_number as number, max(date) as exported_date"
+                f" from `{self.exported_order_table_name}`"
+                f" group by order_number;")
+         gbq_df = pandas_gbq.read_gbq(sql, project_id=self.project_id, credentials=credentials, progress_bar_type=None)
+         logger.debug(f"Received {gbq_df.shape[0]} GBQ records.")
+
+         # Create a dataframe from the list of orders and do a left join on the orders exported from GBQ
+         orders_df = pd.DataFrame.from_records(orders)
+         joined = pd.merge(orders_df, gbq_df, on="number", how="left")
+
+         never_exported = joined[joined["exported_date"].isna()]
+         logger.debug(f"There are {never_exported.shape[0]} new orders to export.")
+
+         if self.check_for_updated_orders:
+             updated_after_export = joined[joined["exported_date"] < joined["change_date"]]
+             logger.debug(f"There are {updated_after_export.shape[0]} orders that have been updated and need to be exported again.")
+             to_export = set(never_exported["number"]).union(set(updated_after_export["number"]))
+         else:
+             to_export = set(never_exported["number"])
+
+         output = list(to_export)
+         logger.debug(output)
+         if len(output) == 0:
+             # There are no orders to export. Cancel the pipeline this step is part of.
+             raise StopBranch()
+         return output
+
+
+ if __name__ == "__main__":
+     import asyncio
+     async def main():
+         s = GetOrderNumbersToExportStep(
+             mongo_database=MongoClient("mongodb conn str")["db_name"],
+             order_type_filter=OrderType.ANY,
+             exported_order_table_name="gravitate-harms-prod.harms_order_exports.exported_backhaul_orders",
+             project_id="gravitate-harms-prod",
+             gcp_credentials_file="google.credentials.json"
+         )
+         await s.execute(None)
+     asyncio.run(main())
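For reference, with the defaults above (60-day lookback, the new-style change query, and the NOT_BACKHAUL filter) the filter and projection passed to collection.find() come out roughly as rebuilt below; this snippet only restates those dictionaries and is not part of the package.

from datetime import datetime, UTC, timedelta

lookback_date = datetime.now(UTC) - timedelta(days=60)
mongo_filter = {
    "$or": [
        {"updated_on": {"$gte": lookback_date}},
        {"movement_updated": {"$gte": lookback_date}},
    ],
    "type": {"$ne": "backhaul"},  # OrderType.NOT_BACKHAUL.to_mongo_query()
    "state": "complete",
}
projection = {
    "_id": 0,
    "number": 1,
    "change_date": {"$max": ["$updated_on", "$movement_updated"]},
}
print(mongo_filter)
print(projection)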
bb_integrations_lib/pipelines/steps/importing/load_file_to_dataframe_step.py
@@ -0,0 +1,46 @@
+ import asyncio
+ from typing import Dict
+
+ import pandas as pd
+
+ from bb_integrations_lib.protocols.pipelines import Step
+ from bb_integrations_lib.shared.model import FileReference, FileType, RawData
+ from pandas import DataFrame
+
+
+ class LoadFileToDataFrameStep(Step):
+     def __init__(self, sheet_name: str | int = 0, file_type: FileType | None = None, *args, **kwargs):
+         super().__init__(*args, **kwargs)
+         self.sheet_name = sheet_name
+         self.file_type = file_type
+
+     def describe(self) -> str:
+         return "Load file into dataframe"
+
+     async def execute(self, i: FileReference | RawData) -> DataFrame:
+         if isinstance(i, FileReference):
+             if i.file_type == FileType.excel:
+                 if i.sheet_name is None:
+                     return pd.read_excel(i.file_path, sheet_name=0)
+                 else:
+                     return pd.read_excel(i.file_path, sheet_name=i.sheet_name)
+             elif i.file_type == FileType.csv:
+                 return pd.read_csv(i.file_path)
+             else:
+                 raise NotImplementedError()
+         elif isinstance(i, RawData):
+             if self.file_type is not None:
+                 if self.file_type == "csv":
+                     return pd.read_csv(i.data)
+             if i.file_name.endswith("csv"):
+                 return pd.read_csv(i.data)
+             elif i.file_name.endswith("xlsx") or i.file_name.endswith("xls"):
+                 return pd.read_excel(i.data, sheet_name=0)
+
+ if __name__ == "__main__":
+     async def main():
+         input = FileReference("/home/ben-allen/Downloads/herdrich-payroll-test.xlsx", FileType.excel, "payroll")
+         output = await LoadFileToDataFrameStep().execute(input)
+         print(output.head(5))
+
+     asyncio.run(main())
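A complementary sketch for the RawData branch of the step above, assuming RawData accepts file_name and data keyword arguments (as it is constructed in the IMAP step later in this diff) and that data is a readable buffer; the sample CSV rows are fabricated.

import asyncio
import io

from bb_integrations_lib.pipelines.steps.importing.load_file_to_dataframe_step import LoadFileToDataFrameStep
from bb_integrations_lib.shared.model import RawData

async def main() -> None:
    buffer = io.BytesIO(b"site,product,volume\nA,ULSD,100\nB,UNL,250\n")  # fabricated sample rows
    raw = RawData(file_name="readings.csv", data=buffer)  # assumed keyword constructor
    df = await LoadFileToDataFrameStep().execute(raw)     # routed to pd.read_csv via the .csv suffix
    print(df)

asyncio.run(main())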
bb_integrations_lib/pipelines/steps/importing/load_imap_attachment_step.py
@@ -0,0 +1,172 @@
+ from time import sleep
+ from typing import Any, AsyncIterator
+
+ from loguru import logger
+
+ from bb_integrations_lib.gravitate.rita_api import GravitateRitaAPI
+ from bb_integrations_lib.models.pipeline_structs import NoPipelineData
+ from bb_integrations_lib.protocols.pipelines import GeneratorStep
+ from bb_integrations_lib.provider.imap.client import IMAPClient
+ from bb_integrations_lib.secrets import IMAPCredential
+ from bb_integrations_lib.secrets.credential_models import IMAPAuthSimple
+ from bb_integrations_lib.shared.model import RawData, FileConfigRawData
+
+
+ class LoadIMAPAttachmentStep(GeneratorStep):
+     def __init__(
+         self,
+         rita_client: GravitateRitaAPI,
+         imap_client: IMAPClient,
+         attachment_extension: str,
+         to_email_address: str | None = None,
+         from_email_address: str | None = None,
+         delivered_to_email_address: str | None = None,
+         retries: int = 3,
+         email_subject: str | None = None,
+         bucket_name: str | None = None,
+         config_names: list[str] | None = None,
+         raise_on_no_data: bool = False,
+         *args, **kwargs
+
+     ):
+         """
+         Load attachments from an IMAP folder / mailbox. The search criteria are reductive; more filters are ANDed and
+         will result in fewer results. If no filters are provided, the default is to search for all emails in the inbox.
+
+         :param rita_client: The RITA client to retrieve fileconfigs with.
+         :param imap_client: The IMAP account which owns the mailbox.
+         :param attachment_extension: Sets the extension on the returned RawData.
+         :param to_email_address: Matches emails sent TO this address, if provided.
+         :param from_email_address: Matches emails sent FROM this address, if provided.
+         :param delivered_to_email_address: Matches emails "Delivered-To" this address, if provided. Differs from the
+         ``to_email_address`` argument - this matches on the Delivered-To mail header, which some email services set if
+         they are automatically forwarding from their mailbox to ours.
+         :param retries: How many times to retry a mail query that fails.
+         :param email_subject: The subject of the email to search for.
+         :param bucket_name: The config bucket that holds the config_names configs.
+         :param config_names: The names of FileConfigs to use to search for.
+         :param raise_on_no_data: Whether to raise an error when there are no matching emails.
+         """
+         super().__init__(*args, **kwargs)
+         self.rita_client = rita_client
+         self.imap_client = imap_client
+
+         self.to_email_address = to_email_address
+         self.from_email_address = from_email_address
+         self.delivered_to_email_address = delivered_to_email_address
+         self.email_subject = email_subject
+         self.attachment_extension = attachment_extension
+         self.retries = retries
+         criteria_parts = ['UNSEEN']
+
+         if self.from_email_address:
+             criteria_parts.append(f'FROM {self.from_email_address}')
+
+         if self.delivered_to_email_address:
+             criteria_parts.append(f'HEADER Delivered-To {self.delivered_to_email_address}')
+         if self.to_email_address:
+             criteria_parts.append(f'TO {self.to_email_address}')
+
+         if self.email_subject:
+             if ' ' in self.email_subject:
+                 criteria_parts.append(f'SUBJECT "{self.email_subject}"')
+             else:
+                 criteria_parts.append(f'SUBJECT {self.email_subject}')
+         self.criteria = '(' + ' '.join(criteria_parts) + ')'
+
+         self.bucket_name = bucket_name
+         self.config_names = config_names
+
+         self.raise_on_no_data = raise_on_no_data
+
+     def describe(self) -> str:
+         return "Get attachments from IMAP folder"
+
+     async def generator(self, i: Any) -> AsyncIterator[RawData]:
+         for config_name in self.config_names:
+             async for result in self.load_using_config(config_name):
+                 yield result
+
+     async def load_using_config(self, config_name: str) -> AsyncIterator[RawData]:
+         file_config = (
+             await self.rita_client.get_fileconfig_by_name(self.bucket_name, config_name)
+         )[config_name]
+         logger.info(f"Searching with criteria {self.criteria}")
+         message_indexes = self.imap_client.search(self.criteria)
+         logger.info(f"Found {len(message_indexes)} new emails in inbox meeting search criteria")
+         if self.raise_on_no_data and not message_indexes:
+             raise NoPipelineData("No new emails found in inbox meeting search criteria")
+         for idx in message_indexes:
+             logger.info(f"Fetching mail {idx}")
+             for retry in range(self.retries):
+                 try:
+                     message = self.imap_client.fetch(idx)
+                     attachment_rd = self.imap_client.get_attachment_from_message(
+                         message,
+                         extension=self.attachment_extension,
+                         return_rawdata=True
+                     )
+                     if attachment_rd is not None:
+                         logger.info(f"Fetched attachment from email id: {idx}")
+                         if file_config:
+                             attachment_rd = FileConfigRawData(
+                                 file_name=attachment_rd.file_name,
+                                 data=attachment_rd.data,
+                                 file_config=file_config
+                             )
+                             self.pipeline_context.file_config = file_config
+                         yield attachment_rd
+                         break
+                     else:
+                         raise Exception("Returned attachment was None")
+                 except Exception as e:
+                     logger.error(f"Error reading data from mail: {e}")
+                     sleep(3)
+                     self.imap_client.mark_unseen(idx)
+
+     @staticmethod
+     def _unplus_email(email_str: str) -> str:
+         """
+         Remove plus-addressing from an email to get the 'base' email account. If there is no plus, the original email
+         string is returned. Note that in either case, the email must have an @ symbol in it.
+         """
+         split_at = email_str.split("@")
+         if len(split_at) != 2:
+             raise ValueError(f"Email address '{email_str}' is not in the expected format")
+         first_half = split_at[0]
+         has_plus = "+" in first_half
+         if has_plus:
+             unplussed = first_half[:first_half.rfind("+")]
+             return f"{unplussed}@{split_at[1]}"
+         return email_str
+
+
+ if __name__ == "__main__":
+     async def main():
+         step = LoadIMAPAttachmentStep(
+             rita_client=GravitateRitaAPI(
+                 base_url="",
+                 client_id="",
+                 client_secret=""
+             ),
+             imap_client=IMAPClient(
+                 credentials=IMAPCredential(
+                     host="",
+                     port=993,
+                     email_address="",
+                     auth=IMAPAuthSimple(
+                         password="",
+                     )
+                 )
+             ),
+             attachment_extension=".csv",
+             bucket_name="/Inventory",
+             config_names=["wawa"]
+         )
+         async for result in step.generator(None):
+             print(result)
+
+
+     import asyncio
+
+     asyncio.run(main())
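Two small, self-contained illustrations of the logic above: the IMAP search string that __init__ assembles for a from-address plus a multi-word subject, and the plus-addressing strip in _unplus_email; the addresses and subject are placeholders.

from bb_integrations_lib.pipelines.steps.importing.load_imap_attachment_step import LoadIMAPAttachmentStep

# Criteria assembly, mirroring the __init__ logic above (placeholder values):
criteria_parts = ["UNSEEN", "FROM reports@example.com", 'SUBJECT "Daily Inventory"']
print("(" + " ".join(criteria_parts) + ")")
# -> (UNSEEN FROM reports@example.com SUBJECT "Daily Inventory")

# Plus-addressing strip:
print(LoadIMAPAttachmentStep._unplus_email("inventory+wawa@example.com"))
# -> inventory@example.com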
bb_integrations_lib/pipelines/steps/importing/pe_bulk_sync_price_structure_step.py
@@ -0,0 +1,68 @@
+ import json
+ from typing import List
+
+ from loguru import logger
+
+ from bb_integrations_lib.gravitate.pe_api import GravitatePEAPI
+ from bb_integrations_lib.models.pipeline_structs import UploadResult
+ from bb_integrations_lib.models.rita.issue import IssueBase, IssueCategory
+ from bb_integrations_lib.protocols.flat_file import PeBulkSyncIntegration
+ from bb_integrations_lib.protocols.pipelines import Step
+ from bb_integrations_lib.util.utils import CustomJSONEncoder
+
+
+ class PEBulksSyncPriceStructure(Step):
+     def __init__(self, pe_client: GravitatePEAPI, *args, **kwargs):
+         super().__init__(*args, **kwargs)
+         self.pe_client = pe_client
+
+     def describe(self) -> str:
+         return "Bulk Sync Price Structure in Pricing Engine"
+
+     async def execute(self, i: List[PeBulkSyncIntegration]) -> UploadResult:
+         failed_rows: List = []
+         success_rows: List = []
+         responses: List = []
+         try:
+             for row in i:
+                 row_dump = row.model_dump(exclude_none=True)
+                 row_dump = self.gen_unique_price_dtos(row_dump)
+                 try:
+                     response = await self.pe_client.bulk_sync_price_structure(row_dump)
+                     responses.append(response)
+                     success_rows.append({**row_dump, "response": response})
+                 except Exception as e:
+                     logger.error(f"Failed to bulk sync row: {e}")
+                     failed_rows.append({**row_dump, "response": str(e)})
+                     continue
+         except Exception as e:
+             if irc := self.pipeline_context.issue_report_config:
+                 fc = self.pipeline_context.file_config
+                 key = f"{irc.key_base}_{fc.config_id}_failed_to_upload"
+                 self.pipeline_context.issues.append(IssueBase(
+                     key=key,
+                     config_id=fc.config_id,
+                     name="Failed to bulk sync data",
+                     category=IssueCategory.TANK_READING,
+                     problem_short=f"{len(failed_rows)} rows failed to bulk sync",
+                     problem_long=json.dumps(failed_rows)
+                 ))
+         logs = {
+             "request": [l.model_dump() for l in i],
+             "response": responses
+         }
+         self.pipeline_context.included_files["price merge data"] = json.dumps(logs, cls=CustomJSONEncoder)
+         return UploadResult(succeeded=len(success_rows), failed=len(failed_rows),
+                             succeeded_items=list(success_rows))
+
+     def gen_unique_price_dtos(self, row_dump: dict):
+         deduped = []
+         keys = set()
+         for elem in row_dump['IntegrationDtos'][0]['PriceInstrumentDTOs']:
+             key = f"{elem['ProductLookup']['SourceIdString']} @ {elem['LocationLookup']['SourceIdString']} - {elem['CounterPartyLookup']['SourceIdString']}"
+             if key in keys:
+                 continue
+             keys.add(key)
+             deduped.append(elem)
+         row_dump['IntegrationDtos'][0]['PriceInstrumentDTOs'] = deduped
+         return row_dump
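A standalone illustration of the dedup key used by gen_unique_price_dtos above; the payload values are hypothetical and the loop simply restates the method's product/location/counterparty keying.

row_dump = {
    "IntegrationDtos": [{
        "PriceInstrumentDTOs": [
            {"ProductLookup": {"SourceIdString": "ULSD"},
             "LocationLookup": {"SourceIdString": "TULSA"},
             "CounterPartyLookup": {"SourceIdString": "ACME"}},
            {"ProductLookup": {"SourceIdString": "ULSD"},   # same product/location/counterparty -> dropped
             "LocationLookup": {"SourceIdString": "TULSA"},
             "CounterPartyLookup": {"SourceIdString": "ACME"}},
        ]
    }]
}

keys, deduped = set(), []
for elem in row_dump["IntegrationDtos"][0]["PriceInstrumentDTOs"]:
    key = (f"{elem['ProductLookup']['SourceIdString']} @ "
           f"{elem['LocationLookup']['SourceIdString']} - "
           f"{elem['CounterPartyLookup']['SourceIdString']}")
    if key not in keys:
        keys.add(key)
        deduped.append(elem)
print(len(deduped))  # 1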
bb_integrations_lib/pipelines/steps/importing/pe_price_merge_step.py
@@ -0,0 +1,86 @@
+ import json
+ from typing import Dict, List
+
+ from bb_integrations_lib.gravitate.pe_api import GravitatePEAPI
+ from bb_integrations_lib.util.utils import CustomJSONEncoder
+ from bb_integrations_lib.util.config.manager import GlobalConfigManager
+ from bb_integrations_lib.util.config.model import GlobalConfig
+ from loguru import logger
+ from bb_integrations_lib.models.pipeline_structs import BBDUploadResult, UploadResult
+ from bb_integrations_lib.models.rita.issue import IssueBase, IssueCategory
+ from bb_integrations_lib.protocols.pipelines import Step
+ from bb_integrations_lib.protocols.flat_file import PePriceMergeIntegration
+
+
+ class PEPriceMerge(Step[List[PePriceMergeIntegration], BBDUploadResult, None]):
+     def __init__(self, step_configuration: Dict[str, str]):
+         super().__init__(step_configuration)
+         self.config_manager = GlobalConfigManager()
+         self.env_mode = step_configuration.get('mode', "production")
+         self.tenant_name = step_configuration.get('tenant_name', None)
+         if 'pe_client_base_url' in step_configuration:
+             self.client_base_url = step_configuration['pe_client_base_url']
+             self.username = step_configuration['pe_username']
+             self.password = step_configuration['pe_password']
+             self.client_name = step_configuration['client_name']
+             self.pe_integration_client = GravitatePEAPI(
+                 base_url=self.client_base_url,
+                 username=self.username,
+                 password=self.password)
+
+         else:
+             if self.tenant_name is None:
+                 raise ValueError("'tenant_name' or pe client base url are required")
+             self.secret_data: GlobalConfig = self.config_manager.get_environment(self.tenant_name)
+             if self.env_mode == 'production':
+                 self.pe_integration_client = GravitatePEAPI(
+                     base_url=self.secret_data.prod.pe.base_url,
+                     username=self.secret_data.prod.pe.username,
+                     password=self.secret_data.prod.pe.password,
+                 )
+             else:
+                 logger.debug("Initializing API in dev mode")
+                 self.pe_integration_client = GravitatePEAPI(
+                     base_url=self.secret_data.test.pe.base_url,
+                     username=self.secret_data.test.pe.username,
+                     password=self.secret_data.test.pe.password,
+                 )
+
+     def describe(self) -> str:
+         return "Merge Prices in Pricing Engine"
+
+     async def execute(self, i: List[PePriceMergeIntegration]) -> BBDUploadResult:
+         failed_rows: List = []
+         success_rows: List = []
+         responses: List = []
+         try:
+             for row in i:
+                 row_dump = row.model_dump(exclude_none=True)
+                 try:
+                     response = await self.pe_integration_client.merge_prices(row_dump)
+                     success_rows.append({**row_dump, "response": response})
+                     responses.append(response)
+                 except Exception as e:
+                     logger.error(f"Failed to merge row: {e}")
+                     failed_rows.append(row_dump)
+                     continue
+         except Exception as e:
+             if irc := self.pipeline_context.issue_report_config:
+                 fc = self.pipeline_context.file_config
+                 key = f"{irc.key_base}_{fc.config_id}_failed_to_upload"
+                 self.pipeline_context.issues.append(IssueBase(
+                     key=key,
+                     config_id=fc.config_id,
+                     name="Failed to merge price row",
+                     category=IssueCategory.PRICE,
+                     problem_short=f"{len(failed_rows)} rows failed to price merge",
+                     problem_long=json.dumps(failed_rows)
+                 ))
+         logs = {
+             "request": [l.model_dump() for l in i],
+             "response": responses
+         }
+         self.pipeline_context.included_files["price merge data"] = json.dumps(logs, cls=CustomJSONEncoder)
+         return UploadResult(succeeded=len(success_rows), failed=len(failed_rows),
+                             succeeded_items=list(success_rows))
+
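A hedged construction sketch for the step above using the explicit pe_client_base_url path (the alternative is to supply tenant_name and let GlobalConfigManager resolve credentials); it assumes the Step base class accepts the configuration dict as in the other steps in this diff, and the URL and credentials are placeholders.

from bb_integrations_lib.pipelines.steps.importing.pe_price_merge_step import PEPriceMerge

step = PEPriceMerge({
    "pe_client_base_url": "https://pe.example.invalid",  # placeholder Pricing Engine URL
    "pe_username": "integration-user",                   # placeholder credentials
    "pe_password": "not-a-real-password",
    "client_name": "example-client",
})
print(step.describe())  # Merge Prices in Pricing Engine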