bb-integrations-library 3.0.11 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (217)
  1. bb_integrations_lib/__init__.py +0 -0
  2. bb_integrations_lib/converters/__init__.py +0 -0
  3. bb_integrations_lib/gravitate/__init__.py +0 -0
  4. bb_integrations_lib/gravitate/base_api.py +20 -0
  5. bb_integrations_lib/gravitate/model.py +29 -0
  6. bb_integrations_lib/gravitate/pe_api.py +122 -0
  7. bb_integrations_lib/gravitate/rita_api.py +552 -0
  8. bb_integrations_lib/gravitate/sd_api.py +572 -0
  9. bb_integrations_lib/gravitate/testing/TTE/sd/models.py +1398 -0
  10. bb_integrations_lib/gravitate/testing/TTE/sd/tests/test_models.py +2987 -0
  11. bb_integrations_lib/gravitate/testing/__init__.py +0 -0
  12. bb_integrations_lib/gravitate/testing/builder.py +55 -0
  13. bb_integrations_lib/gravitate/testing/openapi.py +70 -0
  14. bb_integrations_lib/gravitate/testing/util.py +274 -0
  15. bb_integrations_lib/mappers/__init__.py +0 -0
  16. bb_integrations_lib/mappers/prices/__init__.py +0 -0
  17. bb_integrations_lib/mappers/prices/model.py +106 -0
  18. bb_integrations_lib/mappers/prices/price_mapper.py +127 -0
  19. bb_integrations_lib/mappers/prices/protocol.py +20 -0
  20. bb_integrations_lib/mappers/prices/util.py +61 -0
  21. bb_integrations_lib/mappers/rita_mapper.py +523 -0
  22. bb_integrations_lib/models/__init__.py +0 -0
  23. bb_integrations_lib/models/dtn_supplier_invoice.py +487 -0
  24. bb_integrations_lib/models/enums.py +28 -0
  25. bb_integrations_lib/models/pipeline_structs.py +76 -0
  26. bb_integrations_lib/models/probe/probe_event.py +20 -0
  27. bb_integrations_lib/models/probe/request_data.py +431 -0
  28. bb_integrations_lib/models/probe/resume_token.py +7 -0
  29. bb_integrations_lib/models/rita/audit.py +113 -0
  30. bb_integrations_lib/models/rita/auth.py +30 -0
  31. bb_integrations_lib/models/rita/bucket.py +17 -0
  32. bb_integrations_lib/models/rita/config.py +188 -0
  33. bb_integrations_lib/models/rita/constants.py +19 -0
  34. bb_integrations_lib/models/rita/crossroads_entities.py +293 -0
  35. bb_integrations_lib/models/rita/crossroads_mapping.py +428 -0
  36. bb_integrations_lib/models/rita/crossroads_monitoring.py +78 -0
  37. bb_integrations_lib/models/rita/crossroads_network.py +41 -0
  38. bb_integrations_lib/models/rita/crossroads_rules.py +80 -0
  39. bb_integrations_lib/models/rita/email.py +39 -0
  40. bb_integrations_lib/models/rita/issue.py +63 -0
  41. bb_integrations_lib/models/rita/mapping.py +227 -0
  42. bb_integrations_lib/models/rita/probe.py +58 -0
  43. bb_integrations_lib/models/rita/reference_data.py +110 -0
  44. bb_integrations_lib/models/rita/source_system.py +9 -0
  45. bb_integrations_lib/models/rita/workers.py +76 -0
  46. bb_integrations_lib/models/sd/bols_and_drops.py +241 -0
  47. bb_integrations_lib/models/sd/get_order.py +301 -0
  48. bb_integrations_lib/models/sd/orders.py +18 -0
  49. bb_integrations_lib/models/sd_api.py +115 -0
  50. bb_integrations_lib/pipelines/__init__.py +0 -0
  51. bb_integrations_lib/pipelines/parsers/__init__.py +0 -0
  52. bb_integrations_lib/pipelines/parsers/distribution_report/__init__.py +0 -0
  53. bb_integrations_lib/pipelines/parsers/distribution_report/order_by_site_product_parser.py +50 -0
  54. bb_integrations_lib/pipelines/parsers/distribution_report/tank_configs_parser.py +47 -0
  55. bb_integrations_lib/pipelines/parsers/dtn/__init__.py +0 -0
  56. bb_integrations_lib/pipelines/parsers/dtn/dtn_price_parser.py +102 -0
  57. bb_integrations_lib/pipelines/parsers/dtn/model.py +79 -0
  58. bb_integrations_lib/pipelines/parsers/price_engine/__init__.py +0 -0
  59. bb_integrations_lib/pipelines/parsers/price_engine/parse_accessorials_prices_parser.py +67 -0
  60. bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/__init__.py +0 -0
  61. bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_merge_parser.py +111 -0
  62. bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_sync_parser.py +107 -0
  63. bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/shared.py +81 -0
  64. bb_integrations_lib/pipelines/parsers/tank_reading_parser.py +155 -0
  65. bb_integrations_lib/pipelines/parsers/tank_sales_parser.py +144 -0
  66. bb_integrations_lib/pipelines/shared/__init__.py +0 -0
  67. bb_integrations_lib/pipelines/shared/allocation_matching.py +227 -0
  68. bb_integrations_lib/pipelines/shared/bol_allocation.py +2793 -0
  69. bb_integrations_lib/pipelines/steps/__init__.py +0 -0
  70. bb_integrations_lib/pipelines/steps/create_accessorials_step.py +80 -0
  71. bb_integrations_lib/pipelines/steps/distribution_report/__init__.py +0 -0
  72. bb_integrations_lib/pipelines/steps/distribution_report/distribution_report_datafram_to_raw_data.py +33 -0
  73. bb_integrations_lib/pipelines/steps/distribution_report/get_model_history_step.py +50 -0
  74. bb_integrations_lib/pipelines/steps/distribution_report/get_order_by_site_product_step.py +62 -0
  75. bb_integrations_lib/pipelines/steps/distribution_report/get_tank_configs_step.py +40 -0
  76. bb_integrations_lib/pipelines/steps/distribution_report/join_distribution_order_dos_step.py +85 -0
  77. bb_integrations_lib/pipelines/steps/distribution_report/upload_distribution_report_datafram_to_big_query.py +47 -0
  78. bb_integrations_lib/pipelines/steps/echo_step.py +14 -0
  79. bb_integrations_lib/pipelines/steps/export_dataframe_to_rawdata_step.py +28 -0
  80. bb_integrations_lib/pipelines/steps/exporting/__init__.py +0 -0
  81. bb_integrations_lib/pipelines/steps/exporting/bbd_export_payroll_file_step.py +107 -0
  82. bb_integrations_lib/pipelines/steps/exporting/bbd_export_readings_step.py +236 -0
  83. bb_integrations_lib/pipelines/steps/exporting/cargas_wholesale_bundle_upload_step.py +33 -0
  84. bb_integrations_lib/pipelines/steps/exporting/dataframe_flat_file_export.py +29 -0
  85. bb_integrations_lib/pipelines/steps/exporting/gcs_bucket_export_file_step.py +34 -0
  86. bb_integrations_lib/pipelines/steps/exporting/keyvu_export_step.py +356 -0
  87. bb_integrations_lib/pipelines/steps/exporting/pe_price_export_step.py +238 -0
  88. bb_integrations_lib/pipelines/steps/exporting/platform_science_order_sync_step.py +500 -0
  89. bb_integrations_lib/pipelines/steps/exporting/save_rawdata_to_disk.py +15 -0
  90. bb_integrations_lib/pipelines/steps/exporting/sftp_export_file_step.py +60 -0
  91. bb_integrations_lib/pipelines/steps/exporting/sftp_export_many_files_step.py +23 -0
  92. bb_integrations_lib/pipelines/steps/exporting/update_exported_orders_table_step.py +64 -0
  93. bb_integrations_lib/pipelines/steps/filter_step.py +22 -0
  94. bb_integrations_lib/pipelines/steps/get_latest_sync_date.py +34 -0
  95. bb_integrations_lib/pipelines/steps/importing/bbd_import_payroll_step.py +30 -0
  96. bb_integrations_lib/pipelines/steps/importing/get_order_numbers_to_export_step.py +138 -0
  97. bb_integrations_lib/pipelines/steps/importing/load_file_to_dataframe_step.py +46 -0
  98. bb_integrations_lib/pipelines/steps/importing/load_imap_attachment_step.py +172 -0
  99. bb_integrations_lib/pipelines/steps/importing/pe_bulk_sync_price_structure_step.py +68 -0
  100. bb_integrations_lib/pipelines/steps/importing/pe_price_merge_step.py +86 -0
  101. bb_integrations_lib/pipelines/steps/importing/sftp_file_config_step.py +124 -0
  102. bb_integrations_lib/pipelines/steps/importing/test_exact_file_match.py +57 -0
  103. bb_integrations_lib/pipelines/steps/null_step.py +15 -0
  104. bb_integrations_lib/pipelines/steps/pe_integration_job_step.py +32 -0
  105. bb_integrations_lib/pipelines/steps/processing/__init__.py +0 -0
  106. bb_integrations_lib/pipelines/steps/processing/archive_gcs_step.py +76 -0
  107. bb_integrations_lib/pipelines/steps/processing/archive_sftp_step.py +48 -0
  108. bb_integrations_lib/pipelines/steps/processing/bbd_format_tank_readings_step.py +492 -0
  109. bb_integrations_lib/pipelines/steps/processing/bbd_upload_prices_step.py +54 -0
  110. bb_integrations_lib/pipelines/steps/processing/bbd_upload_tank_sales_step.py +124 -0
  111. bb_integrations_lib/pipelines/steps/processing/bbd_upload_tankreading_step.py +80 -0
  112. bb_integrations_lib/pipelines/steps/processing/convert_bbd_order_to_cargas_step.py +226 -0
  113. bb_integrations_lib/pipelines/steps/processing/delete_sftp_step.py +33 -0
  114. bb_integrations_lib/pipelines/steps/processing/dtn/__init__.py +2 -0
  115. bb_integrations_lib/pipelines/steps/processing/dtn/convert_dtn_invoice_to_sd_model.py +145 -0
  116. bb_integrations_lib/pipelines/steps/processing/dtn/parse_dtn_invoice_step.py +38 -0
  117. bb_integrations_lib/pipelines/steps/processing/file_config_parser_step.py +720 -0
  118. bb_integrations_lib/pipelines/steps/processing/file_config_parser_step_v2.py +418 -0
  119. bb_integrations_lib/pipelines/steps/processing/get_sd_price_price_request.py +105 -0
  120. bb_integrations_lib/pipelines/steps/processing/keyvu_upload_deliveryplan_step.py +39 -0
  121. bb_integrations_lib/pipelines/steps/processing/mark_orders_exported_in_bbd_step.py +185 -0
  122. bb_integrations_lib/pipelines/steps/processing/pe_price_rows_processing_step.py +174 -0
  123. bb_integrations_lib/pipelines/steps/processing/send_process_report_step.py +47 -0
  124. bb_integrations_lib/pipelines/steps/processing/sftp_renamer_step.py +61 -0
  125. bb_integrations_lib/pipelines/steps/processing/tank_reading_touchup_steps.py +75 -0
  126. bb_integrations_lib/pipelines/steps/processing/upload_supplier_invoice_step.py +16 -0
  127. bb_integrations_lib/pipelines/steps/send_attached_in_rita_email_step.py +44 -0
  128. bb_integrations_lib/pipelines/steps/send_rita_email_step.py +34 -0
  129. bb_integrations_lib/pipelines/steps/sleep_step.py +24 -0
  130. bb_integrations_lib/pipelines/wrappers/__init__.py +0 -0
  131. bb_integrations_lib/pipelines/wrappers/accessorials_transformation.py +104 -0
  132. bb_integrations_lib/pipelines/wrappers/distribution_report.py +191 -0
  133. bb_integrations_lib/pipelines/wrappers/export_tank_readings.py +237 -0
  134. bb_integrations_lib/pipelines/wrappers/import_tank_readings.py +192 -0
  135. bb_integrations_lib/pipelines/wrappers/wrapper.py +81 -0
  136. bb_integrations_lib/protocols/__init__.py +0 -0
  137. bb_integrations_lib/protocols/flat_file.py +210 -0
  138. bb_integrations_lib/protocols/gravitate_client.py +104 -0
  139. bb_integrations_lib/protocols/pipelines.py +697 -0
  140. bb_integrations_lib/provider/__init__.py +0 -0
  141. bb_integrations_lib/provider/api/__init__.py +0 -0
  142. bb_integrations_lib/provider/api/cargas/__init__.py +0 -0
  143. bb_integrations_lib/provider/api/cargas/client.py +43 -0
  144. bb_integrations_lib/provider/api/cargas/model.py +49 -0
  145. bb_integrations_lib/provider/api/cargas/protocol.py +23 -0
  146. bb_integrations_lib/provider/api/dtn/__init__.py +0 -0
  147. bb_integrations_lib/provider/api/dtn/client.py +128 -0
  148. bb_integrations_lib/provider/api/dtn/protocol.py +9 -0
  149. bb_integrations_lib/provider/api/keyvu/__init__.py +0 -0
  150. bb_integrations_lib/provider/api/keyvu/client.py +30 -0
  151. bb_integrations_lib/provider/api/keyvu/model.py +149 -0
  152. bb_integrations_lib/provider/api/macropoint/__init__.py +0 -0
  153. bb_integrations_lib/provider/api/macropoint/client.py +28 -0
  154. bb_integrations_lib/provider/api/macropoint/model.py +40 -0
  155. bb_integrations_lib/provider/api/pc_miler/__init__.py +0 -0
  156. bb_integrations_lib/provider/api/pc_miler/client.py +130 -0
  157. bb_integrations_lib/provider/api/pc_miler/model.py +6 -0
  158. bb_integrations_lib/provider/api/pc_miler/web_services_apis.py +131 -0
  159. bb_integrations_lib/provider/api/platform_science/__init__.py +0 -0
  160. bb_integrations_lib/provider/api/platform_science/client.py +147 -0
  161. bb_integrations_lib/provider/api/platform_science/model.py +82 -0
  162. bb_integrations_lib/provider/api/quicktrip/__init__.py +0 -0
  163. bb_integrations_lib/provider/api/quicktrip/client.py +52 -0
  164. bb_integrations_lib/provider/api/telapoint/__init__.py +0 -0
  165. bb_integrations_lib/provider/api/telapoint/client.py +68 -0
  166. bb_integrations_lib/provider/api/telapoint/model.py +178 -0
  167. bb_integrations_lib/provider/api/warren_rogers/__init__.py +0 -0
  168. bb_integrations_lib/provider/api/warren_rogers/client.py +207 -0
  169. bb_integrations_lib/provider/aws/__init__.py +0 -0
  170. bb_integrations_lib/provider/aws/s3/__init__.py +0 -0
  171. bb_integrations_lib/provider/aws/s3/client.py +126 -0
  172. bb_integrations_lib/provider/ftp/__init__.py +0 -0
  173. bb_integrations_lib/provider/ftp/client.py +140 -0
  174. bb_integrations_lib/provider/ftp/interface.py +273 -0
  175. bb_integrations_lib/provider/ftp/model.py +76 -0
  176. bb_integrations_lib/provider/imap/__init__.py +0 -0
  177. bb_integrations_lib/provider/imap/client.py +228 -0
  178. bb_integrations_lib/provider/imap/model.py +3 -0
  179. bb_integrations_lib/provider/sqlserver/__init__.py +0 -0
  180. bb_integrations_lib/provider/sqlserver/client.py +106 -0
  181. bb_integrations_lib/secrets/__init__.py +4 -0
  182. bb_integrations_lib/secrets/adapters.py +98 -0
  183. bb_integrations_lib/secrets/credential_models.py +222 -0
  184. bb_integrations_lib/secrets/factory.py +85 -0
  185. bb_integrations_lib/secrets/providers.py +160 -0
  186. bb_integrations_lib/shared/__init__.py +0 -0
  187. bb_integrations_lib/shared/exceptions.py +25 -0
  188. bb_integrations_lib/shared/model.py +1039 -0
  189. bb_integrations_lib/shared/shared_enums.py +510 -0
  190. bb_integrations_lib/storage/README.md +236 -0
  191. bb_integrations_lib/storage/__init__.py +0 -0
  192. bb_integrations_lib/storage/aws/__init__.py +0 -0
  193. bb_integrations_lib/storage/aws/s3.py +8 -0
  194. bb_integrations_lib/storage/defaults.py +72 -0
  195. bb_integrations_lib/storage/gcs/__init__.py +0 -0
  196. bb_integrations_lib/storage/gcs/client.py +8 -0
  197. bb_integrations_lib/storage/gcsmanager/__init__.py +0 -0
  198. bb_integrations_lib/storage/gcsmanager/client.py +8 -0
  199. bb_integrations_lib/storage/setup.py +29 -0
  200. bb_integrations_lib/util/__init__.py +0 -0
  201. bb_integrations_lib/util/cache/__init__.py +0 -0
  202. bb_integrations_lib/util/cache/custom_ttl_cache.py +75 -0
  203. bb_integrations_lib/util/cache/protocol.py +9 -0
  204. bb_integrations_lib/util/config/__init__.py +0 -0
  205. bb_integrations_lib/util/config/manager.py +391 -0
  206. bb_integrations_lib/util/config/model.py +41 -0
  207. bb_integrations_lib/util/exception_logger/__init__.py +0 -0
  208. bb_integrations_lib/util/exception_logger/exception_logger.py +146 -0
  209. bb_integrations_lib/util/exception_logger/test.py +114 -0
  210. bb_integrations_lib/util/utils.py +364 -0
  211. bb_integrations_lib/workers/__init__.py +0 -0
  212. bb_integrations_lib/workers/groups.py +13 -0
  213. bb_integrations_lib/workers/rpc_worker.py +50 -0
  214. bb_integrations_lib/workers/topics.py +20 -0
  215. bb_integrations_library-3.0.11.dist-info/METADATA +59 -0
  216. bb_integrations_library-3.0.11.dist-info/RECORD +217 -0
  217. bb_integrations_library-3.0.11.dist-info/WHEEL +4 -0
bb_integrations_lib/pipelines/steps/processing/file_config_parser_step.py
@@ -0,0 +1,720 @@
+ import json
+ import math
+ from enum import Enum
+ from typing import Dict, Iterable, List, Optional
+
+ import pandas as pd
+ import pytz
+ from dateutil.parser import parse
+ from loguru import logger
+ import traceback
+
+ from bb_integrations_lib.models.rita.mapping import MappingType
+ from bb_integrations_lib.shared.model import RawData, FileConfigRawData, MappingMode
+ from bb_integrations_lib.protocols.pipelines import Step
+ from bb_integrations_lib.models.rita.config import FileConfig, ConfigAction
+ from bb_integrations_lib.models.rita.issue import IssueBase, IssueCategory
+ from bb_integrations_lib.protocols.flat_file import TankReading, TankMonitorType, PriceRow, DriverCredential, \
+     BulkSyncIntegrationDTO, PriceInstrumentDTO, PELookup, PriceTypeDTO, PeBulkSyncIntegration, \
+     PriceMergeIntegrationDTO, PriceMergeValue, PePriceMergeIntegration, TankSales
+ from bb_integrations_lib.mappers.rita_mapper import RitaMapper, RitaAPIMappingProvider
+ from babel.numbers import parse_decimal
+ from zipfile import BadZipFile
+ from dateutil.tz import gettz
+
+ import warnings
+
+ tzmapping = {
+     'EST': gettz("US/Eastern"),
+     'EDT': gettz("US/Eastern"),
+     'CST': gettz("US/Central"),
+     'CDT': gettz("US/Central"),
+     'MST': gettz("US/Mountain"),
+     'MDT': gettz("US/Mountain"),
+     'PST': gettz("US/Pacific"),
+     'PDT': gettz("US/Pacific"),
+ }
+
+
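Note on the `tzmapping` table above: `dateutil` does not resolve ambiguous timezone abbreviations such as EST or PDT on its own, so the parsers pass this map via `tzinfos`. A minimal sketch of the behavior (illustrative timestamp):

```python
from dateutil.parser import parse
from dateutil.tz import gettz

tzmapping = {'EST': gettz("US/Eastern")}
# Without tzinfos the abbreviation would be dropped; with the map the
# datetime comes back timezone-aware.
print(parse("2024-01-15 06:30 EST", tzinfos=tzmapping).isoformat())
# 2024-01-15T06:30:00-05:00
```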
+ class FileConfigParserStep(
+     Step[FileConfigRawData, Iterable[TankReading] | Iterable[PriceRow] | Iterable[
+         DriverCredential] | Iterable[PeBulkSyncIntegration] | Iterable[PePriceMergeIntegration] | Iterable[
+         TankSales], None]):
+     def __init__(self, step_configuration: Dict[str, str]):
+         super().__init__(step_configuration)
+         self.rita_url = step_configuration["rita_url"]
+         self.rita_psk = None
+         self.client_id = step_configuration["client_id"]
+         self.client_secret = step_configuration["client_secret"]
+         self.rita_tenant = step_configuration["rita_tenant"]
+         self.output_type = step_configuration.get('output_type', "TankReading")
+         self.verbose = step_configuration.get("verbose", False)
+         self.mapping_type = step_configuration.get("mapping_type", MappingMode.full)
+         self.included_payload = step_configuration.get("included_payload", {})
+
+         warnings.warn(
+             "FileConfigParserStep is deprecated. Use FileConfigParserV2 + a custom parser instead.",
+             DeprecationWarning, stacklevel=2
+         )
+
+     def describe(self) -> str:
+         return f"Parse Flat File into {self.output_type}"
+
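A minimal usage sketch of the constructor and `describe()` above, with illustrative configuration values (the URL, credentials, and tenant are placeholders, not real endpoints); instantiating emits the `DeprecationWarning` shown above:

```python
from bb_integrations_lib.shared.model import MappingMode

step = FileConfigParserStep({
    "rita_url": "https://rita.example.com",  # placeholder
    "client_id": "example-client-id",        # placeholder
    "client_secret": "example-secret",       # placeholder
    "rita_tenant": "example-tenant",         # placeholder
    "output_type": "TankReading",            # or PriceRow, Credential, PeStructureSync, PePriceMerge, TankSales
    "mapping_type": MappingMode.full,
    "verbose": True,
})
print(step.describe())  # Parse Flat File into TankReading
```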
+     async def execute(self, i: FileConfigRawData) -> Iterable[TankReading] | Iterable[PriceRow] | Iterable[
+             DriverCredential] | Iterable[PeBulkSyncIntegration] | Iterable[PePriceMergeIntegration] | Iterable[TankSales]:
+         assert isinstance(i, FileConfigRawData)
+         logger.info(f"Beginning data extract for file {i.file_name}")
+         logger.debug(
+             f"File Config: {i.file_config.client_name}, Inbound: {i.file_config.inbound_directory}, Archive: {i.file_config.archive_directory}")
+
+         return list(await self.parse_raw_data(i))
+
+     def parse_tank_and_site_ids(self, records: List[Dict], rd: FileConfigRawData, mapper: RitaMapper) -> Dict:
+         """Only to be used by TankReading or TankSales"""
+         translated_records = {}
+         translation_failures = []
+         mapping_failures = []
+         for row in records:
+             try:
+                 translated = self.translate_row(rd.file_config, row)
+             except Exception as e:
+                 if self.verbose:
+                     logger.warning(f"Translation failed for record {row}: {e}")
+                 translation_failures.append(row)
+                 continue
+             try:
+                 if 'volume' in translated:
+                     if translated['volume'] == 'nan':
+                         if self.verbose:
+                             logger.warning(f"Skipped record {row} due to NaN volume. Translated={translated}")
+                         continue
+                     if float(translated["volume"]) < 0:
+                         if self.verbose:
+                             logger.warning(f"Skipped record {row} due to negative volume. Translated={translated}")
+                         continue
+                 if 'tank_id' in translated:
+                     if translated['tank_id'] == 'nan':
+                         if self.verbose:
+                             logger.warning(f"Skipped record {row} due to NaN tank. Translated={translated}")
+                         continue
+                 if 'sales' in translated:
+                     if translated['sales'] == 'nan':
+                         if self.verbose:
+                             logger.warning(f"Skipped record {row} due to NaN sales. Translated={translated}")
+                         continue
+                 if self.mapping_type == MappingMode.skip:
+                     key = f"{translated['site_id']}_{translated['tank_id']}"
+                     translated_records[key] = translated
+                 elif self.mapping_type == MappingMode.partial or self.mapping_type == MappingMode.full:
+                     try:
+                         site_id = translated['site_id']
+                         tank_id = translated["tank_id"]
+                         mapped_site_ids = mapper.get_gravitate_parent_ids(site_id, MappingType.site)
+                         mapped_tank_ids = mapper.get_gravitate_child_ids(site_id, tank_id.strip(), MappingType.tank)
+                         for site_id in mapped_site_ids:
+                             for tank_id in mapped_tank_ids:
+                                 key = f"{site_id}_{tank_id}"
+                                 translated["site_id"] = site_id
+                                 translated["tank_id"] = tank_id
+                                 translated_records[key] = {**translated}
+                     except (KeyError, ValueError) as e:
+                         if self.mapping_type == MappingMode.partial:
+                             key = f"{translated['site_id']}_{translated['tank_id']}"
+                             translated_records[key] = translated
+                         else:
+                             raise e
+                 else:
+                     raise ValueError(f"MappingType {self.mapping_type} not supported")
+             except (KeyError, ValueError) as e:
+                 if self.verbose:
+                     logger.warning(f"Skipped record {row} due to error: {e}")
+                 mapping_failures.append(translated)
+         self.handle_issues(translation_failures, mapping_failures, rd, output_type=self.output_type)
+         return translated_records
+
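The three `MappingMode` branches above differ only in how site/tank IDs are resolved; note also that `translated_records` is keyed by `site_tank`, so a later row for the same tank overwrites an earlier one. A comment-only sketch with made-up IDs:

```python
# MappingMode.skip:    "STORE-7"/"2" used as-is -> key "STORE-7_2"
# MappingMode.full:    both IDs must resolve through RitaMapper; a
#                      KeyError/ValueError routes the row to mapping_failures
# MappingMode.partial: tries the mapper, silently falls back to the raw IDs
```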
+     def parse_price_row(self, records: List[Dict], rd: FileConfigRawData, mapper: Optional[RitaMapper] = None) -> \
+             Iterable[PriceRow]:
+         translation_failures = []
+         mapping_failures = []
+         for row in records:
+             try:
+                 translated = self.translate_row(rd.file_config, row)
+                 yield PriceRow(
+                     effective_from=translated["effective_from"],
+                     effective_to=translated["effective_to"],
+                     price=float(translated["price"]),
+                     price_type=translated["price_type"],
+                     contract=translated.get("contract", None),
+                     timezone=translated.get("timezone", None),
+                     terminal_id=translated.get("terminal_id", None),
+                     terminal_source_id=translated.get("terminal_source_id", None),
+                     terminal_source_system_id=translated.get("terminal_source_system_id", None),
+                     terminal=translated.get("terminal", None),
+                     product_id=translated.get("product_id", None),
+                     product_source_id=translated.get("product_source_id", None),
+                     product_source_system_id=translated.get("product_source_system_id", None),
+                     product=translated.get("product", None),
+                     supplier_id=translated.get("supplier_id", None),
+                     supplier_source_id=translated.get("supplier_source_id", None),
+                     supplier_source_system_id=translated.get("supplier_source_system_id", None),
+                     counterparty_source_id=translated.get("counterparty_source_id", None),
+                     counterparty_source_system_id=translated.get("counterparty_source_system_id", None),
+                     supplier=translated.get("supplier", None),
+                     enabled=bool(translated.get("enabled", None)),
+                     disabled_until=translated.get("disabled_until", None),
+                     expire=translated.get("expire", None),
+                     min_quantity=translated.get("min_quantity", None),
+                     max_quantity=translated.get("max_quantity", None),
+                     curve_id=translated.get("curve_id", None),
+                     row=translated.get("row", None),
+                 )
+             except (KeyError, ValueError) as e:
+                 translation_failures.append(row)
+                 if self.verbose:
+                     logger.warning(f"Skipped record {row} due to error: {e}")
+         self.handle_issues(translation_failures, mapping_failures, rd, output_type=self.output_type)
+
+     def parse_tank_reading_rows(self, records: List[Dict], rd: FileConfigRawData,
+                                 mapper: Optional[RitaMapper] = None) -> Iterable[TankReading]:
+         translated_records = self.parse_tank_and_site_ids(records, rd, mapper)
+
+         for translated_record in translated_records.values():
+             if translated_record.get("reading_time") is None:
+                 logger.warning(f"Skipped record {translated_record} due to missing date")
+                 continue
+             try:
+                 date_parsed = parse(translated_record.get("reading_time"), tzinfos=tzmapping).isoformat()
+             except Exception as parse_error:
+                 logger.warning(f"Skipped record {translated_record} due to date parsing error: {parse_error}")
+                 continue
+             try:
+                 yield TankReading(
+                     store=translated_record.get("site_id"),
+                     date=date_parsed,
+                     monitor_type=TankMonitorType.bbd,
+                     timezone=translated_record.get("timezone"),
+                     volume=translated_record.get("volume"),
+                     tank=translated_record.get("tank_id"),
+                     payload=self.included_payload
+                 )
+             except ValueError as e:
+                 # A ValueError here usually means the volume contains a comma or other
+                 # locale formatting; parse it with Babel instead.
+                 yield TankReading(
+                     store=translated_record.get("site_id"),
+                     date=date_parsed,
+                     monitor_type=TankMonitorType.bbd,
+                     timezone=translated_record.get("timezone"),
+                     volume=float(parse_decimal(translated_record.get("volume"), locale="en_US")),
+                     tank=translated_record.get("tank_id"),
+                     payload=self.included_payload
+                 )
+             except Exception as ee:
+                 logger.warning(f"Skipped record {translated_record} due to error: {ee}")
+                 continue
+
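The Babel fallback above exists for volumes with US-style digit grouping, which `float()` rejects:

```python
from babel.numbers import parse_decimal

# float("1,234.5") raises ValueError on the grouped thousands separator,
# while Babel parses it under the en_US locale:
print(float(parse_decimal("1,234.5", locale="en_US")))  # 1234.5
```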
+     def parse_tank_sales_rows(self, records: List[Dict], rd: FileConfigRawData, mapper: Optional[RitaMapper] = None) -> \
+             Iterable[TankSales]:
+         translated_records = self.parse_tank_and_site_ids(records, rd, mapper)
+         ll = []
+         errors = []
+         for key in translated_records:
+             site_id = translated_records[key].get("site_id", "")
+             tank_id = translated_records[key].get("tank_id", "")
+             try:
+                 tz = translated_records.get(key, {}).get("timezone")
+                 ll.append(TankSales(
+                     store_number=site_id,
+                     tank_id=tank_id,
+                     sales=float(translated_records.get(key)['sales']),
+                     date=parse(translated_records.get(key)['date'], tzinfos=tzmapping).replace(
+                         tzinfo=(pytz.timezone(tz)), hour=0, minute=0, second=0, microsecond=0).isoformat(),
+                 ))
+             except ValueError as e:
+                 errors.append({
+                     "error": f"ValueError: {str(e)}",
+                     "row": f"record containing site: {site_id} and tank: {tank_id}",
+                 })
+                 if self.verbose:
+                     logger.warning(
+                         f"Skipped record containing site: {site_id} and tank: {tank_id} due to error: {str(e)}")
+                 continue
+             except KeyError as ke:
+                 errors.append({
+                     "error": f"KeyError: {str(ke)}",
+                     "row": f"record containing site: {site_id} and tank: {tank_id}",
+                 })
+                 if self.verbose:
+                     logger.warning(
+                         f"Skipped record containing site: {site_id} and tank: {tank_id} due to error: {str(ke)}")
+                 continue
+             except TypeError as tpe:
+                 errors.append({
+                     "error": f"TypeError: {str(tpe)}",
+                     "row": f"record containing site: {site_id} and tank: {tank_id}",
+                 })
+                 if self.verbose:
+                     logger.warning(
+                         f"Skipped record containing site: {site_id} and tank: {tank_id} due to error: {str(tpe)}")
+                 continue
+             except Exception as ee:
+                 errors.append({
+                     "error": f"Unknown error: {str(ee)}",
+                     "row": f"record containing site: {site_id} and tank: {tank_id}",
+                 })
+                 if self.verbose:
+                     logger.warning(
+                         f"Skipped record containing site: {site_id} and tank: {tank_id} due to error: {str(ee)}")
+                 continue
+         return ll
+
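One caveat worth flagging in the date handling above: attaching a pytz zone with `datetime.replace(tzinfo=...)` applies the zone's first tabulated (LMT) offset rather than the proper local offset, which is why pytz documents `localize()` instead. A small demonstration:

```python
import pytz
from dateutil.parser import parse

naive = parse("2024-01-15")
print(naive.replace(tzinfo=pytz.timezone("US/Central")).isoformat())
# 2024-01-15T00:00:00-05:51  (LMT offset)
print(pytz.timezone("US/Central").localize(naive).isoformat())
# 2024-01-15T00:00:00-06:00  (correct CST offset)
```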
+     def parse_credential_rows(self, records: List[Dict], rd: FileConfigRawData, mapper: Optional[RitaMapper] = None) -> \
+             Iterable[DriverCredential]:
+         translation_failures = []
+         mapping_failures = []
+         for row in records:
+             try:
+                 translated = self.translate_row(rd.file_config, row)
+                 credential_ids = mapper.get_gravitate_parent_ids(source_id=translated['credential_id'],
+                                                                  mapping_type=MappingType.credential)
+                 driver_ids = mapper.get_gravitate_parent_ids(source_id=translated['driver_id'],
+                                                              mapping_type=MappingType.driver)
+                 expiration_date = translated.get('expiration_date')
+                 if expiration_date == 'nan':
+                     if self.verbose:
+                         logger.warning("Skipped record due to NaN expiration date")
+                     continue
+                 for credential_id in credential_ids:
+                     for driver_id in driver_ids:
+                         yield DriverCredential(
+                             driver_id=driver_id,
+                             credential_id=credential_id,
+                             certification_date=translated.get('certification_date'),
+                             expiration_date=expiration_date
+                         )
+             except (KeyError, ValueError) as e:
+                 translation_failures.append(row)
+                 if self.verbose:
+                     logger.warning(f"Skipped record {row} due to error: {e}")
+                 continue
+         self.handle_issues(translation_failures, mapping_failures, rd, output_type=self.output_type)
+
+     def parse_price_sync_rows(self, records: List[Dict], rd: FileConfigRawData, mapper: Optional[RitaMapper] = None) -> \
+             Iterable[PeBulkSyncIntegration]:
+         translation_failures = []
+         mapping_failures = []
+         dtos: List[PriceInstrumentDTO] = []
+         price_publisher_name = None
+         posting_type = "Posting"
+         source_system_id = None
+         for row in records:
+             try:
+                 row_is_rvp = FileConfigParserStep.is_rvp(row.get('RVP', '0.0'))
+                 translated = self.translate_row(rd.file_config, row)
+                 price_publisher_name = translated['price_publisher']
+                 configuration = translated['configuration']
+                 supplier_key = translated['supplier_key']
+                 location_key = translated['location_key']
+                 product_key = translated['product_key']
+                 posting_type = translated.get('posting_type', "Posting")
+                 location_name = translated.get('location_name', None)
+                 product_name = translated.get('product_name', None)
+                 supplier_name = translated.get('supplier_name', None)
+                 if row_is_rvp and "RVP" not in product_key:
+                     product_key = FileConfigParserStep.format_rvp_product(product_key, row['RVP'])
+                 if '.' in product_key and not row_is_rvp:
+                     product_key = product_key.split('.')[0]
+                 source_system_id = translated['source_system_id']
+
+                 if self.mapping_type == MappingMode.full:
+                     supplier_source_id = mapper.get_gravitate_child_id(source_parent_id=configuration,
+                                                                        source_child_id=supplier_key,
+                                                                        mapping_type=MappingType.counterparty)
+                     location_source_id = mapper.get_gravitate_child_id(source_parent_id=configuration,
+                                                                        source_child_id=location_key,
+                                                                        mapping_type=MappingType.terminal)
+                     product_source_id = mapper.get_gravitate_child_id(source_parent_id=configuration,
+                                                                       source_child_id=product_key,
+                                                                       mapping_type=MappingType.product)
+                 elif self.mapping_type == MappingMode.skip:
+                     supplier_source_id = supplier_key
+                     location_source_id = location_key
+                     product_source_id = product_key
+                 else:
+                     raise ValueError(f"Unsupported mapping type: {self.mapping_type}")
+                 if product_name is not None and location_name is not None and supplier_name is not None:
+                     price_instrument_name = f"{product_name} @ {location_name} - {supplier_name}"
+                 else:
+                     price_instrument_name = f"{product_source_id} @ {location_source_id} - {supplier_source_id}"
+                 price_instrument_source_string_id = f"{price_publisher_name} - {price_instrument_name}"
+                 dtos.append(PriceInstrumentDTO(
+                     Name=price_instrument_name,
+                     Abbreviation=price_instrument_name,
+                     SourceIdString=price_instrument_source_string_id,
+                     ProductLookup=PELookup(
+                         SourceIdString=product_source_id,
+                         SourceSystemId=int(source_system_id)
+                     ),
+                     LocationLookup=PELookup(
+                         SourceIdString=location_source_id,
+                         SourceSystemId=int(source_system_id)
+                     ),
+                     CounterPartyLookup=PELookup(
+                         SourceIdString=supplier_source_id,
+                         SourceSystemId=int(source_system_id)
+                     )
+                 ))
+             except (KeyError, ValueError) as e:
+                 mapping_failures.append(row)
+                 logger.warning(f"Skipped record {row} due to KeyError or ValueError: {e}")
+                 continue
+             except Exception as uh:
+                 translation_failures.append(row)
+                 if self.verbose:
+                     logger.warning(f"Skipped record {row} due to unhandled exception: {uh.args}")
+                 if irc := self.pipeline_context.issue_report_config:
+                     if len(translation_failures) > 0:
+                         key = f"{irc.key_base}_{rd.file_config.config_id}_translation_error"
+                         self.pipeline_context.issues.append(IssueBase(
+                             key=key,
+                             config_id=rd.file_config.config_id,
+                             name=f"Translation error - {rd.file_config.client_name}",
+                             category=IssueCategory.TANK_READING,
+                             problem_short=f"{rd.file_name}: At least 1 row failed to translate",
+                             problem_long=json.dumps(translation_failures)
+                         ))
+                 continue
+
+         if dtos and price_publisher_name and source_system_id:
+             try:
+                 yield PeBulkSyncIntegration(
+                     IntegrationDtos=[
+                         BulkSyncIntegrationDTO(
+                             Name=price_publisher_name,
+                             Abbreviation=price_publisher_name,
+                             SourceIdString=price_publisher_name,
+                             PriceInstrumentDTOs=dtos,
+                             PriceTypeDTOs=[
+                                 PriceTypeDTO(
+                                     PriceTypeMeaning=posting_type
+                                 )
+                             ]
+                         )
+                     ],
+                     SourceSystemId=int(source_system_id)
+                 )
+             except Exception as ex:
+                 logger.warning(f"Unexpected error when yielding integration: {ex}")
+                 if irc := self.pipeline_context.issue_report_config:
+                     key = f"{irc.key_base}_{rd.file_config.config_id}_parser_error"
+                     self.pipeline_context.issues.append(IssueBase(
+                         key=key,
+                         config_id=rd.file_config.config_id,
+                         name=f"Parser error - {rd.file_config.client_name}",
+                         category=IssueCategory.REFERENCE_DATA,
+                         problem_short=f"{rd.file_name}: Unexpected exception",
+                         problem_long=json.dumps({
+                             "exception_type": type(ex).__name__,
+                             "args": list(ex.args) if ex.args else []
+                         })
+                     ))
+         else:
+             logger.warning("No valid records were processed to create an integration")
+
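For reference, the instrument naming convention built above composes three parts; with illustrative values:

```python
# Illustrative values only:
product_name, location_name, supplier_name = "87 CBOB", "Tulsa", "Acme Fuels"
price_publisher_name = "ExamplePublisher"

price_instrument_name = f"{product_name} @ {location_name} - {supplier_name}"
# "87 CBOB @ Tulsa - Acme Fuels"
price_instrument_source_string_id = f"{price_publisher_name} - {price_instrument_name}"
# "ExamplePublisher - 87 CBOB @ Tulsa - Acme Fuels"
```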
+     def parse_price_merge_rows(self, records: List[Dict], rd: FileConfigRawData, mapper: Optional[RitaMapper] = None) -> \
+             Iterable[PePriceMergeIntegration]:
+         translation_failures = []
+         mapping_failures = []
+         dtos: List[PriceMergeIntegrationDTO] = []
+         price_publisher_name = None
+         posting_type = "Posting"  # Default value
+         source_system_id = None
+         for row in records:
+             try:
+                 row_is_rvp = FileConfigParserStep.is_rvp(row.get('RVP', '0.0'))
+                 translated = self.translate_row(rd.file_config, row)
+                 price_publisher_name = translated['price_publisher']
+                 configuration = translated['configuration']
+                 supplier_key = translated['supplier_key']
+                 location_key = translated['location_key']
+                 product_key = translated['product_key']
+                 location_name = translated.get('location_name', None)
+                 product_name = translated.get('product_name', None)
+                 supplier_name = translated.get('supplier_name', None)
+                 price_factor = translated.get('price_factor')
+                 if row_is_rvp and "RVP" not in product_key:
+                     product_key = FileConfigParserStep.format_rvp_product(product_key, row['RVP'])
+                 if '.' in product_key and not row_is_rvp:
+                     product_key = product_key.split('.')[0]
+                 source_system_id = translated['source_system_id']
+                 posting_type = translated.get('posting_type', "Posting")
+                 date_str = translated['date']
+                 price = translated['price']
+                 if price_factor is not None:
+                     price = float(price) / int(price_factor)
+
+                 if self.mapping_type == MappingMode.full:
+                     supplier_source_id = mapper.get_gravitate_child_id(source_parent_id=configuration,
+                                                                        source_child_id=supplier_key,
+                                                                        mapping_type=MappingType.counterparty)
+                     location_source_id = mapper.get_gravitate_child_id(source_parent_id=configuration,
+                                                                        source_child_id=location_key,
+                                                                        mapping_type=MappingType.terminal)
+                     product_source_id = mapper.get_gravitate_child_id(source_parent_id=configuration,
+                                                                       source_child_id=product_key,
+                                                                       mapping_type=MappingType.product)
+                 elif self.mapping_type == MappingMode.skip:
+                     supplier_source_id = supplier_key
+                     location_source_id = location_key
+                     product_source_id = product_key
+                 else:
+                     raise ValueError(f"Unsupported mapping type: {self.mapping_type}")
+                 if product_name is not None and location_name is not None and supplier_name is not None:
+                     price_instrument_name = f"{product_name} @ {location_name} - {supplier_name}"
+                 else:
+                     price_instrument_name = f"{product_source_id} @ {location_source_id} - {supplier_source_id}"
+                 price_instrument_source_string_id = f"{price_publisher_name} - {price_instrument_name}"
+                 dtos.append(PriceMergeIntegrationDTO(
+                     PriceInstrumentLookup=PELookup(
+                         SourceIdString=price_instrument_source_string_id,
+                         SourceSystemId=int(source_system_id)
+                     ),
+                     EffectiveFromDateTime=parse(date_str).isoformat(),
+                     PriceValues=[
+                         PriceMergeValue(
+                             Value=float(price),
+                         )
+                     ]
+                 ))
+             except (KeyError, ValueError) as e:
+                 mapping_failures.append(row)
+                 logger.warning(f"Skipped record {row} due to KeyError or ValueError: {e}")
+                 continue
+             except Exception as uh:
+                 translation_failures.append(row)
+                 if self.verbose:
+                     logger.warning(f"Skipped record {row} due to unhandled exception: {uh.args}")
+                 if irc := self.pipeline_context.issue_report_config:
+                     if len(translation_failures) > 0:
+                         key = f"{irc.key_base}_{rd.file_config.config_id}_translation_error"
+                         self.pipeline_context.issues.append(IssueBase(
+                             key=key,
+                             config_id=rd.file_config.config_id,
+                             name=f"Translation error - {rd.file_config.client_name}",
+                             category=IssueCategory.TANK_READING,
+                             problem_short=f"{rd.file_name}: At least 1 row failed to translate",
+                             problem_long=json.dumps(translation_failures)
+                         ))
+                 continue
+         if dtos and price_publisher_name and source_system_id:
+             try:
+                 yield PePriceMergeIntegration(
+                     IntegrationDtos=dtos,
+                     SourceSystemId=int(source_system_id)
+                 )
+             except Exception as ex:
+                 logger.warning(f"Unexpected error when yielding integration: {ex}")
+                 if irc := self.pipeline_context.issue_report_config:
+                     key = f"{irc.key_base}_{rd.file_config.config_id}_parser_error"
+                     self.pipeline_context.issues.append(IssueBase(
+                         key=key,
+                         config_id=rd.file_config.config_id,
+                         name=f"Parser error - {rd.file_config.client_name}",
+                         category=IssueCategory.PRICE,
+                         problem_short=f"{rd.file_name}: Unexpected exception",
+                         problem_long=json.dumps({
+                             "exception_type": type(ex).__name__,
+                             "args": list(ex.args) if ex.args else []
+                         })
+                     ))
+
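The `price_factor` division above lets feeds publish scaled integers; for example, a feed publishing prices in ten-thousandths of a dollar (hypothetical values):

```python
price, price_factor = "23550", "10000"   # hypothetical feed values
print(float(price) / int(price_factor))  # 2.355
```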
+     def handle_issues(self, translation_failures: List, mapping_failures: List, rd: FileConfigRawData,
+                       output_type: Optional[str] = None):
+         if not output_type:
+             output_type = self.output_type
+         if irc := self.pipeline_context.issue_report_config:
+             if output_type == "TankReading":
+                 if len(translation_failures) > 0:
+                     key = f"{irc.key_base}_{rd.file_config.config_id}_translation_error"
+                     self.pipeline_context.issues.append(IssueBase(
+                         key=key,
+                         config_id=rd.file_config.config_id,
+                         name=f"Translation error - {rd.file_config.client_name}",
+                         category=IssueCategory.TANK_READING,
+                         problem_short=f"{rd.file_name}: At least 1 row failed to translate",
+                         problem_long=json.dumps(translation_failures)
+                     ))
+
+                 if len(mapping_failures) > 0:
+                     key = f"{irc.key_base}_{rd.file_config.config_id}_mapping_error"
+                     self.pipeline_context.issues.append(IssueBase(
+                         key=key,
+                         config_id=rd.file_config.config_id,
+                         name=f"Mapping error - {rd.file_config.client_name}",
+                         category=IssueCategory.TANK_READING,
+                         problem_short=f"{rd.file_config.client_name}: At least 1 row failed to map",
+                         problem_long=json.dumps(mapping_failures)
+                     ))
+
+     async def parse_raw_data(self, rd: FileConfigRawData) -> Iterable[
+             TankReading | PriceRow | DriverCredential | PeBulkSyncIntegration | PePriceMergeIntegration | TankSales]:
+         try:
+             records, raw_data, mapper = await self.get_records(rd)
+             if self.output_type == "TankReading":
+                 return self.parse_tank_reading_rows(records, rd, mapper)
+             elif self.output_type == "TankSales":
+                 return self.parse_tank_sales_rows(records, rd, mapper)
+             elif self.output_type == "PriceRow":
+                 return self.parse_price_row(records, rd, mapper)
+             elif self.output_type == "Credential":
+                 return self.parse_credential_rows(records, rd, mapper)
+             elif self.output_type == "PeStructureSync":
+                 return self.parse_price_sync_rows(records, rd, mapper)
+             elif self.output_type == "PePriceMerge":
+                 return self.parse_price_merge_rows(records, rd, mapper)
+             else:
+                 raise ValueError(
+                     f"Invalid output_type: '{self.output_type}'. Only 'TankReading', 'PriceRow', 'Credential', 'PeStructureSync', 'PePriceMerge' or 'TankSales' are supported.")
+         except Exception as e:
+             logger.error(f"Error in parse_raw_data: {e}")
+             if irc := self.pipeline_context.issue_report_config:
+                 key = f"{irc.key_base}_{rd.file_config.config_id}_parser_error"
+                 self.pipeline_context.issues.append(IssueBase(
+                     key=key,
+                     config_id=rd.file_config.config_id,
+                     name=f"Parser error - {rd.file_config.client_name}",
+                     category=IssueCategory.UNKNOWN,
+                     problem_short=f"{rd.file_name}: Unexpected exception in parse_raw_data",
+                     problem_long=json.dumps({
+                         "exception_type": type(e).__name__,
+                         "args": list(e.args) if e.args else []
+                     })
+                 ))
+             raise
+
+     async def get_records(self, rd: RawData):  # NOTE: annotated as RawData, but a FileConfigRawData is required (asserted below)
+         assert isinstance(rd, FileConfigRawData)
+         try:
+             if hasattr(rd.data, 'seek'):
+                 rd.data.seek(0)
+             match rd.file_config.file_extension:
+                 case "csv1":
+                     """This format will skip the top row of the file"""
+                     temp_df = pd.read_csv(rd.data, index_col=False, dtype=str, skiprows=1)
+                     temp_df = temp_df.rename(columns=lambda x: x.strip())
+                     records = temp_df.to_dict(orient='records')
+                 case "csv":
+                     temp_df = pd.read_csv(rd.data, index_col=False, dtype=str)
+                     temp_df = temp_df.rename(columns=lambda x: x.strip())
+                     records = temp_df.to_dict(orient="records")
+                 case "csv_headless":
+                     """This format will treat the file as headless and load in generic column names 'col 1', 'col 2', etc."""
+                     df = pd.read_csv(rd.data, index_col=False, dtype=str, header=None)
+                     df.columns = [f"col {i + 1}" for i in range(df.shape[1])]
+                     records = df.to_dict(orient='records')
+                 case "xls" | "xlsx":
+                     """Reads an Excel file and returns a dataframe"""
+                     try:
+                         temp_df = pd.read_excel(rd.data, engine="openpyxl", dtype=str)
+                         temp_df = temp_df.rename(columns=lambda x: x.strip())
+                         records = temp_df.to_dict(orient='records')
+                     except (OSError, BadZipFile):
+                         # The file may be an old binary Excel file.
+                         records = pd.read_excel(rd.data, engine="xlrd", dtype=str).to_dict(orient='records')
+                 case "html":
+                     """Reads an HTML file and returns a dataframe"""
+                     data = pd.read_html(rd.data, header=0)
+                     merged = pd.concat(data)
+                     records = merged.to_dict(orient="records")
+                 case _:
+                     raise ValueError("The file_extension in the file config is not supported")
+
+             rd.data_buffer_bkp = pd.DataFrame(records).to_csv(index=False)
+             mapper = RitaMapper(
+                 provider=RitaAPIMappingProvider(
+                     client_id=self.client_id,
+                     client_secret=self.client_secret,
+                     rita_tenant=self.rita_tenant,
+                 ),
+                 source_system=rd.file_config.source_system,
+             )
+             await mapper.load_mappings_async()
+
+             return records, rd, mapper
+         except Exception as e:
+             if irc := self.pipeline_context.issue_report_config:
+                 key = f"{irc.key_base}_{rd.file_config.config_id}_failed_to_load"
+                 name = f"Failed to load file \"{rd.file_name}\" for client {rd.file_config.client_name}"
+                 self.pipeline_context.issues.append(IssueBase(
+                     key=key,
+                     config_id=rd.file_config.config_id,
+                     name=name,
+                     category=IssueCategory.TANK_READING,
+                     problem_short=str(e),
+                     problem_long=traceback.format_exc()
+                 ))
+             raise e
+
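A self-contained sketch of the `csv_headless` branch above, showing the generated generic headers:

```python
import io
import pandas as pd

df = pd.read_csv(io.StringIO("S1,T2,500\nS2,T1,750"), index_col=False, dtype=str, header=None)
df.columns = [f"col {i + 1}" for i in range(df.shape[1])]
print(df.to_dict(orient="records"))
# [{'col 1': 'S1', 'col 2': 'T2', 'col 3': '500'}, {'col 1': 'S2', 'col 2': 'T1', 'col 3': '750'}]
```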
+     def translate_row(self, file_config: FileConfig, row: dict) -> Dict:
+         output_row = {}
+         for column in file_config.cols:
+             if len(column.file_columns) == 0:
+                 output_row[column.column_name] = None
+             elif column.action == ConfigAction.concat:
+                 concatenated = ""
+                 for entry in column.file_columns:
+                     stripped_entry = entry.strip()
+                     if stripped_entry in row:
+                         value = row[stripped_entry]
+                         if value is None or (isinstance(value, float) and math.isnan(value)) or pd.isna(value):
+                             concatenated += ""
+                         else:
+                             concatenated += str(value)
+                     else:
+                         concatenated += str(entry)
+                 output_row[column.column_name] = concatenated
+             elif column.action == ConfigAction.parse_date:
+                 output_row[column.column_name] = str(pd.to_datetime(row[column.file_columns[0]]))
+             elif column.action == ConfigAction.add:
+                 output_row[column.column_name] = column.file_columns[0]
+             elif column.action == ConfigAction.remove_leading_zeros:
+                 output_row[column.column_name] = self.strip_leading_zeroes(str(row[column.file_columns[0]]))
+             elif column.action == ConfigAction.remove_trailing_zeros:
+                 output_row[column.column_name] = self.strip_trailing_zeroes(str(row[column.file_columns[0]]))
+             else:
+                 output_row[column.column_name] = str(row[column.file_columns[0]])
+         return output_row
+
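A comment-only illustration of how `translate_row` applies the `ConfigAction` variants (hypothetical column config; the actual `FileConfig` schema lives in bb_integrations_lib.models.rita.config):

```python
# Given a file row {"Store #": "0042", "Read Date": "01/02/2024", "Gallons": "5,000"}:
#   remove_leading_zeros("Store #")   -> "42"
#   parse_date("Read Date")           -> "2024-01-02 00:00:00"
#   add("US/Central")                 -> "US/Central"  (file_columns[0] is a literal)
#   concat("Store #", "-", "Gallons") -> "0042-5,000"  (entries missing from the row fall back to the literal)
#   default action ("Gallons")        -> "5,000"
```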
+     @staticmethod
+     def strip_leading_zeroes(row: str):
+         return row.lstrip('0')
+
+     @staticmethod
+     def strip_trailing_zeroes(row: str):
+         return row.rstrip('0')
+
+     @staticmethod
+     def is_rvp(rvp: str) -> bool:
+         try:
+             return float(rvp) > 0.0
+         except (ValueError, TypeError):
+             return False
+
+     @staticmethod
+     def format_rvp_product(product_key: str, rvp: str) -> str:
+         rvp_str = str(rvp)
+         if product_key.endswith(rvp_str):
+             product_key = product_key[:-len(rvp_str)]
+             product_key = product_key.rstrip('.')
+         return f"{product_key}{float(rvp_str)}"
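
Finally, worked examples for the static helpers above (illustrative inputs):

```python
FileConfigParserStep.strip_leading_zeroes("00420")          # "420"
FileConfigParserStep.strip_trailing_zeroes("420")           # "42" -- note: also strips meaningful zeros
FileConfigParserStep.is_rvp("9.0")                          # True
FileConfigParserStep.is_rvp("n/a")                          # False
FileConfigParserStep.format_rvp_product("CBOB.9.0", "9.0")  # "CBOB9.0"
```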