bb-integrations-library 3.0.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bb_integrations_lib/__init__.py +0 -0
- bb_integrations_lib/converters/__init__.py +0 -0
- bb_integrations_lib/gravitate/__init__.py +0 -0
- bb_integrations_lib/gravitate/base_api.py +20 -0
- bb_integrations_lib/gravitate/model.py +29 -0
- bb_integrations_lib/gravitate/pe_api.py +122 -0
- bb_integrations_lib/gravitate/rita_api.py +552 -0
- bb_integrations_lib/gravitate/sd_api.py +572 -0
- bb_integrations_lib/gravitate/testing/TTE/sd/models.py +1398 -0
- bb_integrations_lib/gravitate/testing/TTE/sd/tests/test_models.py +2987 -0
- bb_integrations_lib/gravitate/testing/__init__.py +0 -0
- bb_integrations_lib/gravitate/testing/builder.py +55 -0
- bb_integrations_lib/gravitate/testing/openapi.py +70 -0
- bb_integrations_lib/gravitate/testing/util.py +274 -0
- bb_integrations_lib/mappers/__init__.py +0 -0
- bb_integrations_lib/mappers/prices/__init__.py +0 -0
- bb_integrations_lib/mappers/prices/model.py +106 -0
- bb_integrations_lib/mappers/prices/price_mapper.py +127 -0
- bb_integrations_lib/mappers/prices/protocol.py +20 -0
- bb_integrations_lib/mappers/prices/util.py +61 -0
- bb_integrations_lib/mappers/rita_mapper.py +523 -0
- bb_integrations_lib/models/__init__.py +0 -0
- bb_integrations_lib/models/dtn_supplier_invoice.py +487 -0
- bb_integrations_lib/models/enums.py +28 -0
- bb_integrations_lib/models/pipeline_structs.py +76 -0
- bb_integrations_lib/models/probe/probe_event.py +20 -0
- bb_integrations_lib/models/probe/request_data.py +431 -0
- bb_integrations_lib/models/probe/resume_token.py +7 -0
- bb_integrations_lib/models/rita/audit.py +113 -0
- bb_integrations_lib/models/rita/auth.py +30 -0
- bb_integrations_lib/models/rita/bucket.py +17 -0
- bb_integrations_lib/models/rita/config.py +188 -0
- bb_integrations_lib/models/rita/constants.py +19 -0
- bb_integrations_lib/models/rita/crossroads_entities.py +293 -0
- bb_integrations_lib/models/rita/crossroads_mapping.py +428 -0
- bb_integrations_lib/models/rita/crossroads_monitoring.py +78 -0
- bb_integrations_lib/models/rita/crossroads_network.py +41 -0
- bb_integrations_lib/models/rita/crossroads_rules.py +80 -0
- bb_integrations_lib/models/rita/email.py +39 -0
- bb_integrations_lib/models/rita/issue.py +63 -0
- bb_integrations_lib/models/rita/mapping.py +227 -0
- bb_integrations_lib/models/rita/probe.py +58 -0
- bb_integrations_lib/models/rita/reference_data.py +110 -0
- bb_integrations_lib/models/rita/source_system.py +9 -0
- bb_integrations_lib/models/rita/workers.py +76 -0
- bb_integrations_lib/models/sd/bols_and_drops.py +241 -0
- bb_integrations_lib/models/sd/get_order.py +301 -0
- bb_integrations_lib/models/sd/orders.py +18 -0
- bb_integrations_lib/models/sd_api.py +115 -0
- bb_integrations_lib/pipelines/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/order_by_site_product_parser.py +50 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/tank_configs_parser.py +47 -0
- bb_integrations_lib/pipelines/parsers/dtn/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/dtn/dtn_price_parser.py +102 -0
- bb_integrations_lib/pipelines/parsers/dtn/model.py +79 -0
- bb_integrations_lib/pipelines/parsers/price_engine/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/price_engine/parse_accessorials_prices_parser.py +67 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_merge_parser.py +111 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_sync_parser.py +107 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/shared.py +81 -0
- bb_integrations_lib/pipelines/parsers/tank_reading_parser.py +155 -0
- bb_integrations_lib/pipelines/parsers/tank_sales_parser.py +144 -0
- bb_integrations_lib/pipelines/shared/__init__.py +0 -0
- bb_integrations_lib/pipelines/shared/allocation_matching.py +227 -0
- bb_integrations_lib/pipelines/shared/bol_allocation.py +2793 -0
- bb_integrations_lib/pipelines/steps/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/create_accessorials_step.py +80 -0
- bb_integrations_lib/pipelines/steps/distribution_report/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/distribution_report/distribution_report_datafram_to_raw_data.py +33 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_model_history_step.py +50 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_order_by_site_product_step.py +62 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_tank_configs_step.py +40 -0
- bb_integrations_lib/pipelines/steps/distribution_report/join_distribution_order_dos_step.py +85 -0
- bb_integrations_lib/pipelines/steps/distribution_report/upload_distribution_report_datafram_to_big_query.py +47 -0
- bb_integrations_lib/pipelines/steps/echo_step.py +14 -0
- bb_integrations_lib/pipelines/steps/export_dataframe_to_rawdata_step.py +28 -0
- bb_integrations_lib/pipelines/steps/exporting/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/exporting/bbd_export_payroll_file_step.py +107 -0
- bb_integrations_lib/pipelines/steps/exporting/bbd_export_readings_step.py +236 -0
- bb_integrations_lib/pipelines/steps/exporting/cargas_wholesale_bundle_upload_step.py +33 -0
- bb_integrations_lib/pipelines/steps/exporting/dataframe_flat_file_export.py +29 -0
- bb_integrations_lib/pipelines/steps/exporting/gcs_bucket_export_file_step.py +34 -0
- bb_integrations_lib/pipelines/steps/exporting/keyvu_export_step.py +356 -0
- bb_integrations_lib/pipelines/steps/exporting/pe_price_export_step.py +238 -0
- bb_integrations_lib/pipelines/steps/exporting/platform_science_order_sync_step.py +500 -0
- bb_integrations_lib/pipelines/steps/exporting/save_rawdata_to_disk.py +15 -0
- bb_integrations_lib/pipelines/steps/exporting/sftp_export_file_step.py +60 -0
- bb_integrations_lib/pipelines/steps/exporting/sftp_export_many_files_step.py +23 -0
- bb_integrations_lib/pipelines/steps/exporting/update_exported_orders_table_step.py +64 -0
- bb_integrations_lib/pipelines/steps/filter_step.py +22 -0
- bb_integrations_lib/pipelines/steps/get_latest_sync_date.py +34 -0
- bb_integrations_lib/pipelines/steps/importing/bbd_import_payroll_step.py +30 -0
- bb_integrations_lib/pipelines/steps/importing/get_order_numbers_to_export_step.py +138 -0
- bb_integrations_lib/pipelines/steps/importing/load_file_to_dataframe_step.py +46 -0
- bb_integrations_lib/pipelines/steps/importing/load_imap_attachment_step.py +172 -0
- bb_integrations_lib/pipelines/steps/importing/pe_bulk_sync_price_structure_step.py +68 -0
- bb_integrations_lib/pipelines/steps/importing/pe_price_merge_step.py +86 -0
- bb_integrations_lib/pipelines/steps/importing/sftp_file_config_step.py +124 -0
- bb_integrations_lib/pipelines/steps/importing/test_exact_file_match.py +57 -0
- bb_integrations_lib/pipelines/steps/null_step.py +15 -0
- bb_integrations_lib/pipelines/steps/pe_integration_job_step.py +32 -0
- bb_integrations_lib/pipelines/steps/processing/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/processing/archive_gcs_step.py +76 -0
- bb_integrations_lib/pipelines/steps/processing/archive_sftp_step.py +48 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_format_tank_readings_step.py +492 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_prices_step.py +54 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_tank_sales_step.py +124 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_tankreading_step.py +80 -0
- bb_integrations_lib/pipelines/steps/processing/convert_bbd_order_to_cargas_step.py +226 -0
- bb_integrations_lib/pipelines/steps/processing/delete_sftp_step.py +33 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/__init__.py +2 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/convert_dtn_invoice_to_sd_model.py +145 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/parse_dtn_invoice_step.py +38 -0
- bb_integrations_lib/pipelines/steps/processing/file_config_parser_step.py +720 -0
- bb_integrations_lib/pipelines/steps/processing/file_config_parser_step_v2.py +418 -0
- bb_integrations_lib/pipelines/steps/processing/get_sd_price_price_request.py +105 -0
- bb_integrations_lib/pipelines/steps/processing/keyvu_upload_deliveryplan_step.py +39 -0
- bb_integrations_lib/pipelines/steps/processing/mark_orders_exported_in_bbd_step.py +185 -0
- bb_integrations_lib/pipelines/steps/processing/pe_price_rows_processing_step.py +174 -0
- bb_integrations_lib/pipelines/steps/processing/send_process_report_step.py +47 -0
- bb_integrations_lib/pipelines/steps/processing/sftp_renamer_step.py +61 -0
- bb_integrations_lib/pipelines/steps/processing/tank_reading_touchup_steps.py +75 -0
- bb_integrations_lib/pipelines/steps/processing/upload_supplier_invoice_step.py +16 -0
- bb_integrations_lib/pipelines/steps/send_attached_in_rita_email_step.py +44 -0
- bb_integrations_lib/pipelines/steps/send_rita_email_step.py +34 -0
- bb_integrations_lib/pipelines/steps/sleep_step.py +24 -0
- bb_integrations_lib/pipelines/wrappers/__init__.py +0 -0
- bb_integrations_lib/pipelines/wrappers/accessorials_transformation.py +104 -0
- bb_integrations_lib/pipelines/wrappers/distribution_report.py +191 -0
- bb_integrations_lib/pipelines/wrappers/export_tank_readings.py +237 -0
- bb_integrations_lib/pipelines/wrappers/import_tank_readings.py +192 -0
- bb_integrations_lib/pipelines/wrappers/wrapper.py +81 -0
- bb_integrations_lib/protocols/__init__.py +0 -0
- bb_integrations_lib/protocols/flat_file.py +210 -0
- bb_integrations_lib/protocols/gravitate_client.py +104 -0
- bb_integrations_lib/protocols/pipelines.py +697 -0
- bb_integrations_lib/provider/__init__.py +0 -0
- bb_integrations_lib/provider/api/__init__.py +0 -0
- bb_integrations_lib/provider/api/cargas/__init__.py +0 -0
- bb_integrations_lib/provider/api/cargas/client.py +43 -0
- bb_integrations_lib/provider/api/cargas/model.py +49 -0
- bb_integrations_lib/provider/api/cargas/protocol.py +23 -0
- bb_integrations_lib/provider/api/dtn/__init__.py +0 -0
- bb_integrations_lib/provider/api/dtn/client.py +128 -0
- bb_integrations_lib/provider/api/dtn/protocol.py +9 -0
- bb_integrations_lib/provider/api/keyvu/__init__.py +0 -0
- bb_integrations_lib/provider/api/keyvu/client.py +30 -0
- bb_integrations_lib/provider/api/keyvu/model.py +149 -0
- bb_integrations_lib/provider/api/macropoint/__init__.py +0 -0
- bb_integrations_lib/provider/api/macropoint/client.py +28 -0
- bb_integrations_lib/provider/api/macropoint/model.py +40 -0
- bb_integrations_lib/provider/api/pc_miler/__init__.py +0 -0
- bb_integrations_lib/provider/api/pc_miler/client.py +130 -0
- bb_integrations_lib/provider/api/pc_miler/model.py +6 -0
- bb_integrations_lib/provider/api/pc_miler/web_services_apis.py +131 -0
- bb_integrations_lib/provider/api/platform_science/__init__.py +0 -0
- bb_integrations_lib/provider/api/platform_science/client.py +147 -0
- bb_integrations_lib/provider/api/platform_science/model.py +82 -0
- bb_integrations_lib/provider/api/quicktrip/__init__.py +0 -0
- bb_integrations_lib/provider/api/quicktrip/client.py +52 -0
- bb_integrations_lib/provider/api/telapoint/__init__.py +0 -0
- bb_integrations_lib/provider/api/telapoint/client.py +68 -0
- bb_integrations_lib/provider/api/telapoint/model.py +178 -0
- bb_integrations_lib/provider/api/warren_rogers/__init__.py +0 -0
- bb_integrations_lib/provider/api/warren_rogers/client.py +207 -0
- bb_integrations_lib/provider/aws/__init__.py +0 -0
- bb_integrations_lib/provider/aws/s3/__init__.py +0 -0
- bb_integrations_lib/provider/aws/s3/client.py +126 -0
- bb_integrations_lib/provider/ftp/__init__.py +0 -0
- bb_integrations_lib/provider/ftp/client.py +140 -0
- bb_integrations_lib/provider/ftp/interface.py +273 -0
- bb_integrations_lib/provider/ftp/model.py +76 -0
- bb_integrations_lib/provider/imap/__init__.py +0 -0
- bb_integrations_lib/provider/imap/client.py +228 -0
- bb_integrations_lib/provider/imap/model.py +3 -0
- bb_integrations_lib/provider/sqlserver/__init__.py +0 -0
- bb_integrations_lib/provider/sqlserver/client.py +106 -0
- bb_integrations_lib/secrets/__init__.py +4 -0
- bb_integrations_lib/secrets/adapters.py +98 -0
- bb_integrations_lib/secrets/credential_models.py +222 -0
- bb_integrations_lib/secrets/factory.py +85 -0
- bb_integrations_lib/secrets/providers.py +160 -0
- bb_integrations_lib/shared/__init__.py +0 -0
- bb_integrations_lib/shared/exceptions.py +25 -0
- bb_integrations_lib/shared/model.py +1039 -0
- bb_integrations_lib/shared/shared_enums.py +510 -0
- bb_integrations_lib/storage/README.md +236 -0
- bb_integrations_lib/storage/__init__.py +0 -0
- bb_integrations_lib/storage/aws/__init__.py +0 -0
- bb_integrations_lib/storage/aws/s3.py +8 -0
- bb_integrations_lib/storage/defaults.py +72 -0
- bb_integrations_lib/storage/gcs/__init__.py +0 -0
- bb_integrations_lib/storage/gcs/client.py +8 -0
- bb_integrations_lib/storage/gcsmanager/__init__.py +0 -0
- bb_integrations_lib/storage/gcsmanager/client.py +8 -0
- bb_integrations_lib/storage/setup.py +29 -0
- bb_integrations_lib/util/__init__.py +0 -0
- bb_integrations_lib/util/cache/__init__.py +0 -0
- bb_integrations_lib/util/cache/custom_ttl_cache.py +75 -0
- bb_integrations_lib/util/cache/protocol.py +9 -0
- bb_integrations_lib/util/config/__init__.py +0 -0
- bb_integrations_lib/util/config/manager.py +391 -0
- bb_integrations_lib/util/config/model.py +41 -0
- bb_integrations_lib/util/exception_logger/__init__.py +0 -0
- bb_integrations_lib/util/exception_logger/exception_logger.py +146 -0
- bb_integrations_lib/util/exception_logger/test.py +114 -0
- bb_integrations_lib/util/utils.py +364 -0
- bb_integrations_lib/workers/__init__.py +0 -0
- bb_integrations_lib/workers/groups.py +13 -0
- bb_integrations_lib/workers/rpc_worker.py +50 -0
- bb_integrations_lib/workers/topics.py +20 -0
- bb_integrations_library-3.0.11.dist-info/METADATA +59 -0
- bb_integrations_library-3.0.11.dist-info/RECORD +217 -0
- bb_integrations_library-3.0.11.dist-info/WHEEL +4 -0
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
import json
|
|
2
|
+
from math import ceil
|
|
3
|
+
from time import sleep
|
|
4
|
+
from typing import List
|
|
5
|
+
|
|
6
|
+
from loguru import logger
|
|
7
|
+
from more_itertools import chunked
|
|
8
|
+
|
|
9
|
+
from bb_integrations_lib.gravitate.sd_api import GravitateSDAPI
|
|
10
|
+
from bb_integrations_lib.models.pipeline_structs import BBDUploadResult
|
|
11
|
+
from bb_integrations_lib.models.rita.issue import IssueBase, IssueCategory
|
|
12
|
+
from bb_integrations_lib.protocols.flat_file import TankReading
|
|
13
|
+
from bb_integrations_lib.protocols.pipelines import Step
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class BBDUploadTankReadingStep(Step):
    """
    Takes a list of TankReading and uploads them to S&D, breaking them into chunks as required.

    :param sd_client: The GravitateSDAPI client to use for uploading.
    :param sleep_between: The number of seconds to sleep between each chunk.
    :param chunk_size: The maximum number of readings to upload in each chunk.
    """

    def __init__(self, sd_client: GravitateSDAPI, sleep_between: float = 0.5, chunk_size: int = 1000, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.sd_client = sd_client
        self.sleep_between = sleep_between
        self.chunk_size = chunk_size

    def describe(self) -> str:
        return "Upload TankReadings to BBD"

    async def execute(self, i: List[TankReading]) -> BBDUploadResult:
        """
        Upload readings in chunks of ``chunk_size``, tally per-chunk successes and
        failures, and raise a RITA issue on the pipeline context if any failed.

        :param i: The readings to upload.
        :return: BBDUploadResult with succeeded/failed counts and succeeded item ids.
        :raises Exception: Re-raises any error not tied to a single batch.
        """
        import asyncio  # function-scope import: keeps this fix self-contained

        try:
            total_readings = len(i)
            count = ceil(total_readings / self.chunk_size)
            attempted = 0
            succeeded = 0
            included_set = set()
            failed = []
            for idx, group in enumerate(chunked(i, self.chunk_size)):
                logger.info(f"Uploading readings to bestbuy {idx + 1} of {count}")
                group = [g.model_dump(mode="json") for g in group]
                # BUGFIX: this de-duplicated list used to be computed and then
                # discarded; keep it so identical records are uploaded only once.
                group = list({json.dumps(record, sort_keys=True): record for record in group}.values())
                # Count attempts after dedup so succeeded/attempted stay comparable.
                attempted += len(group)
                try:
                    response = await self.sd_client.upload_readings(group, raise_error=False)
                    response = response.json()
                    succeeded += len(response["ids"])
                    included_set = included_set.union(set(response["ids"]))
                    if len(response["unable to upload"]) > 0:
                        logger.error(f"Errors occurred while uploading data: {response}")
                        failed = failed + response["unable to upload"]
                    # BUGFIX: time.sleep() would block the event loop inside an
                    # async step; asyncio.sleep yields control between chunks.
                    await asyncio.sleep(self.sleep_between)
                except Exception as e:
                    logger.error(f"Batch {idx} readings failed | {e}")
                    # Record every store in the failed batch as not uploaded.
                    failed = failed + [g["store"] for g in group]
                    continue
            logger.info(f"Successfully uploaded {succeeded} of {attempted} readings.")
            # Response ids appear to be "<store>:<suffix>" — keep the store part only.
            included_set = {x.split(":")[0] for x in included_set}

            if len(failed) > 0:
                logger.info(f"Failed to upload {len(failed)} of {attempted} readings")
                fc = self.pipeline_context.file_config
                key = f"{fc.config_id}_failed_to_upload"
                self.pipeline_context.issues.append(IssueBase(
                    key=key,
                    config_id=fc.config_id,
                    name="Failed to upload TankReadings",
                    category=IssueCategory.TANK_READING,
                    problem_short=f"{len(failed)} readings did not upload",
                    problem_long=json.dumps(failed)
                ))

            return BBDUploadResult(succeeded=succeeded, failed=attempted - succeeded,
                                   succeeded_items=list(included_set))
        except Exception as e:
            logger.exception(f"Unable to upload | {e}")
            raise e
|
@@ -0,0 +1,226 @@
|
|
|
1
|
+
from datetime import datetime
|
|
2
|
+
from typing import AsyncGenerator
|
|
3
|
+
|
|
4
|
+
import pandas as pd
|
|
5
|
+
from loguru import logger
|
|
6
|
+
|
|
7
|
+
from bb_integrations_lib.gravitate.rita_api import GravitateRitaAPI
|
|
8
|
+
from bb_integrations_lib.gravitate.sd_api import GravitateSDAPI
|
|
9
|
+
from bb_integrations_lib.mappers.rita_mapper import RitaMapperCore, RitaMapper, RitaAPIMappingProvider
|
|
10
|
+
from bb_integrations_lib.models.rita.mapping import MappingType
|
|
11
|
+
from bb_integrations_lib.protocols.pipelines import GeneratorStep
|
|
12
|
+
from bb_integrations_lib.provider.api.cargas.model import CreateWholesaleTicketRequestBundle, \
|
|
13
|
+
CreateWholesaleTicketRequest, CreateWholesaleLineRequest
|
|
14
|
+
from bb_integrations_lib.util.config.manager import GlobalConfigManager
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class ConvertBBDOrderToCargasWholesaleStep(GeneratorStep):
    def __init__(self, rita_client: GravitateRitaAPI, sd_client: GravitateSDAPI, mapping_source_system: str = "Cargas", *args, **kwargs):
        """
        Convert a BBD order's drops to Cargas wholesale ticket request objects. Input should be an item returned by the
        v1/bols_and_drops endpoint, and should only include orders with at least 1 drop at a wholesale store. Passing an
        order with no wholesale drops will result in a ticket request being generated with no line items.

        Mappings are best-effort; if a field that is not strictly required is not mapped, it will be left out.

        :param rita_client: The GravitateRitaAPI instance to use to get mappings with.
        :param sd_client: The GravitateSDAPI instance to use for API calls.
        :param mapping_source_system: The source system to use for mapping. Defaults to "Cargas".
        """
        super().__init__(*args, **kwargs)
        self.rita_client = rita_client
        self.sd_client = sd_client
        self.mapping_source_system = mapping_source_system

        # Lookup tables keyed by store id / driver schedule id; filled lazily.
        self.store_lkp = {}
        self.driver_sched_lkp = {}
        # Only optional because it will be late initialized by either _load_lookups or _set_test_lookups.
        self.rita_mapper: RitaMapper | None = None

    def describe(self) -> str:
        # Plain string literal: the original used an f-string with no placeholders.
        return "Convert drops on a BBD order to Cargas create wholesale ticket / ticket line requests"

    async def _load_lookups(self, driver_schedules: set[str]):
        """Populate store/driver lookup tables and the RITA mapper from the live APIs."""
        self.gcm = GlobalConfigManager()

        stores = await self.sd_client.get_all_stores()
        stores.raise_for_status()
        self.store_lkp = {store["_id"]: store for store in stores.json()}

        drivers = await self.sd_client.get_driver_tracking(driver_schedule_ids=list(driver_schedules))
        drivers.raise_for_status()
        self.driver_sched_lkp = {driver["driver_schedule_id"]: driver for driver in drivers.json()}

        self.rita_mapper = RitaMapper(
            provider=RitaAPIMappingProvider(self.rita_client),
            source_system=self.mapping_source_system
        )
        await self.rita_mapper.load_mappings_async()

    def _set_test_lookups(self, store_lkp: dict, driver_sched_lkp: dict, rita_mapper: RitaMapperCore):
        """Inject pre-built lookups and mapper (test seam; avoids live API calls)."""
        self.store_lkp = store_lkp
        self.driver_sched_lkp = driver_sched_lkp
        self.rita_mapper = rita_mapper

    async def generator(self, orders: list[dict]) -> AsyncGenerator[CreateWholesaleTicketRequestBundle, None]:
        """Yield one ticket request bundle per (BOL, counterparty, driver, tractor) group across all orders."""
        # Set comprehension instead of set([...]) — same members, no throwaway list.
        driver_schedules = {
            abol["driver_schedule"] for order in orders for abol in order["allocated_bols"]
        }
        await self._load_lookups(driver_schedules=driver_schedules)

        assert self.rita_mapper is not None, "RITA mapper must be initialized before generator is called"

        for order in orders:
            for result in self.convert_order(order):
                yield result

    def convert_order(self, order: dict) -> list[CreateWholesaleTicketRequestBundle]:
        """
        Converts an S&D order dictionary into one or more Cargas ticket request bundle objects, including line items.
        Line items produced by this function will have a blank DocumentID, which should be filled in with the
        DocumentID that CreateWholesaleTicket returns.

        :raises Exception: If the order has no allocated BOLs at all.
        """
        order_number = order["order_number"]
        allocated_bols = order["allocated_bols"]
        if not allocated_bols:
            raise Exception("Could not convert order: no allocated BOLs")

        bols_df = pd.DataFrame.from_records(allocated_bols)
        # Precompute destination counterparty - could be 2 different stores of same counterparty on a split load
        bols_df["store_counterparty_name"] = bols_df.apply(
            lambda x: self.store_lkp[x["store_id"]]["counterparty_name"],
            axis="columns"
        )
        bols_df["driver_name"] = bols_df.apply(
            lambda x: self.driver_sched_lkp.get(x["driver_schedule"], {}).get("driver_name", ""),
            axis="columns"
        )
        bols_df["tractor"] = bols_df.apply(
            lambda x: self.driver_sched_lkp.get(x["driver_schedule"], {}).get("driver_log", {}).get("tractor", ""),
            axis="columns"
        )

        results = []
        bol_gb = bols_df.groupby(by=["bol_number", "store_counterparty_name", "driver_name", "tractor"])
        for (bol_number, store_cp_name, driver_name, tractor), group in bol_gb:
            # The customer mapping is required: skip the whole group if it fails.
            mcid_str = None
            try:
                mcid_str = self.rita_mapper.get_source_parent_id(store_cp_name, MappingType.counterparty)
                mapped_customer_id = int(mcid_str)
            except KeyError as e:
                # (Removed a dead `mapped_customer_id = None` assignment here.)
                logger.warning(f"Failed to map store counterparty {store_cp_name}: {e}, skipping drops")
                continue
            except ValueError:
                logger.warning(f"Failed to convert mapped customer ID '{mcid_str}' to int, skipping drops")
                continue

            # Driver / tractor mappings are optional: fall back to None (blank on ticket).
            try:
                mapped_driver_id = self.rita_mapper.get_source_parent_id(driver_name, MappingType.driver)
            except KeyError:
                logger.warning(f"Failed to map driver ID '{driver_name}', it will be blank on the ticket")
                mapped_driver_id = None
            try:
                mapped_tractor_id = self.rita_mapper.get_source_parent_id(tractor, MappingType.tractor)
            except KeyError:
                logger.warning(f"Failed to map tractor ID '{tractor}', it will be blank on the ticket")
                mapped_tractor_id = None

            try:
                ticket_request = CreateWholesaleTicketRequest(
                    CustomerID=mapped_customer_id,
                    # Latest delivered date in the group represents the ticket.
                    DeliveryDate=datetime.fromisoformat(max(group["delivered_date"])),
                    CustomerPONumber=bol_number,
                    DriverID=mapped_driver_id,
                    WholesaleTruckID=mapped_tractor_id,
                    Message="",
                    InvoiceNotes="",
                    CostCenterID=None,
                    SubTypeID=None,
                    SalespersonID=None,
                    AdditionalNotes="",
                    UserName="Gravitate"
                )

                line_requests = []
                for index, bol_item in group.iterrows():
                    try:
                        store_number = bol_item["store_number"]
                        tank_number = str(bol_item["store_tank"])
                        mtid_str = None
                        try:
                            mtid_str = self.rita_mapper.get_source_child_id(
                                gravitate_parent_id=store_number, gravitate_child_id=tank_number,
                                mapping_type=MappingType.tank
                            )
                            mapped_tank_id = int(mtid_str)
                        except KeyError as e:
                            logger.warning(f"Failed to map tank '{tank_number}', skipping BOL item: {e}")
                            continue
                        except ValueError as e:
                            logger.warning(
                                f"Failed to convert mapped tank '{tank_number}'->'{mtid_str}' to int, skipping BOL item: {e}")
                            continue

                        miid_str = None
                        bol_product = bol_item["bol_product"]
                        try:
                            miid_str = self.rita_mapper.get_source_parent_id(
                                gravitate_id=bol_product, mapping_type=MappingType.product
                            )
                            mapped_item_id = int(miid_str)
                        except KeyError as e:
                            logger.warning(f"Failed to map product '{bol_product}', skipping BOL item: {e}")
                            continue
                        except ValueError as e:
                            logger.warning(
                                f"Failed to convert mapped product '{bol_product}'->'{miid_str}', skipping BOL item: {e}")
                            continue

                        # We could theoretically be handed an order with no wholesale store drops, in which case we'll
                        # upload a wholesale ticket with no line items. However, upstream should prefilter out any
                        # orders with no wholesale store drops.
                        if self.store_lkp[bol_item["store_id"]].get("extra_data", {}).get("type") != "Wholesale":
                            # BUGFIX: log this row's store number, not the whole
                            # `group['store_number']` column (a pandas Series).
                            logger.info(
                                f"Skipping drop at non-wholesale store '{bol_item['store_number']}' "
                                f"(order {order_number}, BOL {bol_number})"
                            )
                            continue

                        line_requests.append(
                            CreateWholesaleLineRequest(
                                DocumentID=-1,
                                ItemID=mapped_item_id,
                                TankID=mapped_tank_id,
                                Quantity=bol_item["bol_net_volume_allocated"],
                                QuantityGross=bol_item["bol_gross_volume_allocated"],
                                UnitPrice=bol_item.get("price", {}).get("price"),
                                FreightRateID=None,  # TODO: Retrieve? May not be necessary
                                VendorLocationID=None,
                                FreightAmount=None,  # TODO: Retrieve? May not be necessary
                                SurchargeAmount=None,
                                UnitCostOverride=None,
                                CustomerPricingID=None,
                                UserName="Gravitate"
                            )
                        )
                    except Exception as e:
                        logger.warning(f"Could not convert bol item #{index} on order {order_number}: {e}")
                        continue

                if not line_requests:
                    logger.warning(
                        f"No line requests for order {order_number}; will upload a ticket request but it will be empty")
                results.append(CreateWholesaleTicketRequestBundle(
                    ticket_request=ticket_request,
                    line_requests=line_requests
                ))

            except Exception as e:
                logger.warning(f"Could not convert order {order_number}, BOL {bol_number}: {e}")
                continue

        return results

    def get_store_cp_name(self, store_id: str) -> str:
        """Return the counterparty name for a store id from the store lookup table."""
        return self.store_lkp[store_id]["counterparty_name"]
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
import os
|
|
2
|
+
from typing import Dict, Any
|
|
3
|
+
|
|
4
|
+
import loguru
|
|
5
|
+
|
|
6
|
+
from bb_integrations_lib.protocols.pipelines import Step
|
|
7
|
+
from bb_integrations_lib.provider.ftp.client import FTPIntegrationClient
|
|
8
|
+
from bb_integrations_lib.shared.model import FileConfigRawData, RawData
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class DeleteSFTPStep(Step):
    """
    Delete the remote file referenced by the incoming item from the SFTP server.

    Deletion is best-effort: failures are logged as warnings and the input item
    is passed through unchanged either way.

    :param ftp_client: Client used to perform the remote delete.
    :param src_directory: Directory to resolve plain RawData file names against.
    """

    def __init__(self, ftp_client: FTPIntegrationClient, src_directory: str | None = None, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        self.ftp_client = ftp_client
        self.src_directory = src_directory

    def describe(self) -> str:
        return "SFTP Delete Step"

    def _remote_path(self, item: Any) -> str:
        # FileConfigRawData carries its own inbound directory; plain RawData
        # relies on the step-level src_directory instead.
        if isinstance(item, FileConfigRawData):
            return os.path.join(item.file_config.inbound_directory, item.file_name)
        if isinstance(item, RawData):
            if self.src_directory is None:
                raise RuntimeError("Attempted to delete a RawData object but src_directory was not set.")
            return os.path.join(self.src_directory, item.file_name)
        raise NotImplementedError(f"Unsupported input type: {type(item)}")

    async def execute(self, i: Any) -> Any:
        target = self._remote_path(i)
        try:
            self.ftp_client.delete_file(target)
        except Exception as e:
            loguru.logger.warning(f"Failed to delete: {e}")
        return i
|
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
import itertools
|
|
2
|
+
from datetime import datetime
|
|
3
|
+
from operator import attrgetter
|
|
4
|
+
from typing import Callable
|
|
5
|
+
|
|
6
|
+
from bb_integrations_lib.models.dtn_supplier_invoice import DTNSupplierInvoice, Item, ItemTax, SummaryTax, \
|
|
7
|
+
DeferredTaxItem
|
|
8
|
+
from bb_integrations_lib.protocols.pipelines import Step
|
|
9
|
+
from bb_integrations_lib.shared.model import SDSupplierInvoiceCreateRequest, SDSupplierInvoiceDetail, \
|
|
10
|
+
SDSupplierInvoiceDetailType
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class ConvertDTNInvoiceToSDModel(Step):
|
|
14
|
+
def __init__(self, *args, **kwargs):
    """Forward all arguments to the base Step; this converter holds no state of its own."""
    super().__init__(*args, **kwargs)
|
|
16
|
+
|
|
17
|
+
def describe(self) -> str:
    """Return a one-line, human-readable description of this step."""
    description = "Convert DTN invoice data model to SD supplier invoice data model"
    return description
|
|
19
|
+
|
|
20
|
+
async def execute(self, i: DTNSupplierInvoice) -> SDSupplierInvoiceCreateRequest:
    """
    Convert a parsed DTN supplier invoice into an S&D supplier-invoice create request.

    :param i: The DTN invoice model to convert.
    :return: The equivalent S&D create request, built by convert_dtn_invoice.
    """
    converted = self.convert_dtn_invoice(i)
    return converted
|
|
22
|
+
|
|
23
|
+
def convert_dtn_invoice_deferred_tax_item(self, deferred_tax: DeferredTaxItem) -> SDSupplierInvoiceDetail:
|
|
24
|
+
return SDSupplierInvoiceDetail(
|
|
25
|
+
bol_number=None, # Not present for summary taxes, since they're invoice level
|
|
26
|
+
bol_date_local=None,
|
|
27
|
+
type=SDSupplierInvoiceDetailType.tax,
|
|
28
|
+
product=None,
|
|
29
|
+
tax_type=None,
|
|
30
|
+
tax_description=deferred_tax.description,
|
|
31
|
+
tax_authority=None,
|
|
32
|
+
tax_non_deferred=False,
|
|
33
|
+
rate=deferred_tax.amount,
|
|
34
|
+
amount=1,
|
|
35
|
+
total=deferred_tax.amount,
|
|
36
|
+
uom=None,
|
|
37
|
+
gross_volume=None,
|
|
38
|
+
net_volume=None,
|
|
39
|
+
)
|
|
40
|
+
|
|
41
|
+
def convert_dtn_invoice_summary_tax(self, summary_tax: SummaryTax) -> SDSupplierInvoiceDetail:
|
|
42
|
+
return SDSupplierInvoiceDetail(
|
|
43
|
+
bol_number=None, # Not present for summary taxes, since they're invoice level
|
|
44
|
+
bol_date_local=None,
|
|
45
|
+
type=SDSupplierInvoiceDetailType.tax,
|
|
46
|
+
product=None,
|
|
47
|
+
tax_type=summary_tax.tax_code,
|
|
48
|
+
tax_description=summary_tax.description,
|
|
49
|
+
tax_authority=None,
|
|
50
|
+
tax_non_deferred=not summary_tax.deferred,
|
|
51
|
+
rate=summary_tax.rate,
|
|
52
|
+
amount=summary_tax.quantity_billed,
|
|
53
|
+
total=summary_tax.line_total,
|
|
54
|
+
uom=summary_tax.unit_of_measure,
|
|
55
|
+
gross_volume=None,
|
|
56
|
+
net_volume=None,
|
|
57
|
+
)
|
|
58
|
+
|
|
59
|
+
def convert_dtn_invoice_item_tax(
|
|
60
|
+
self, item_tax: ItemTax, item_product: str, bol_date: datetime | None
|
|
61
|
+
) -> SDSupplierInvoiceDetail:
|
|
62
|
+
return SDSupplierInvoiceDetail(
|
|
63
|
+
bol_number=item_tax.bol_number,
|
|
64
|
+
bol_date_local=bol_date,
|
|
65
|
+
type=SDSupplierInvoiceDetailType.tax,
|
|
66
|
+
product={"source_name": item_product},
|
|
67
|
+
tax_type=item_tax.tax_code,
|
|
68
|
+
tax_description=item_tax.description,
|
|
69
|
+
tax_authority=None,
|
|
70
|
+
tax_non_deferred=not item_tax.deferred,
|
|
71
|
+
rate=item_tax.rate,
|
|
72
|
+
amount=item_tax.quantity_billed,
|
|
73
|
+
total=item_tax.line_total,
|
|
74
|
+
uom=item_tax.unit_of_measure,
|
|
75
|
+
gross_volume=item_tax.quantity_billed,
|
|
76
|
+
net_volume=item_tax.quantity_billed,
|
|
77
|
+
)
|
|
78
|
+
|
|
79
|
+
def convert_dtn_invoice_item(self, invoice_detail: Item) -> list[SDSupplierInvoiceDetail]:
|
|
80
|
+
product = invoice_detail.description
|
|
81
|
+
bol_date = invoice_detail.ship_datetime
|
|
82
|
+
item_detail = SDSupplierInvoiceDetail(
|
|
83
|
+
bol_number=invoice_detail.bol_number,
|
|
84
|
+
bol_date_local=bol_date,
|
|
85
|
+
type=SDSupplierInvoiceDetailType.supply,
|
|
86
|
+
product={"source_name": product},
|
|
87
|
+
rate=invoice_detail.rate,
|
|
88
|
+
amount=invoice_detail.quantity_billed,
|
|
89
|
+
total=invoice_detail.line_total,
|
|
90
|
+
uom=invoice_detail.unit_of_measure,
|
|
91
|
+
gross_volume=invoice_detail.gross_quantity or invoice_detail.quantity_billed,
|
|
92
|
+
net_volume=invoice_detail.net_quantity or invoice_detail.quantity_billed
|
|
93
|
+
)
|
|
94
|
+
tax_details = [
|
|
95
|
+
self.convert_dtn_invoice_item_tax(tax, product, bol_date) for tax in invoice_detail.tax_records or []
|
|
96
|
+
]
|
|
97
|
+
# Should we support RINS numbers here? How?
|
|
98
|
+
return [item_detail] + tax_details
|
|
99
|
+
|
|
100
|
+
@staticmethod
|
|
101
|
+
def resolve_field(invoice: DTNSupplierInvoice, attr_getter: Callable[[Item], str]) -> str | None:
|
|
102
|
+
"""
|
|
103
|
+
Resolve a specific field from the invoice line items, if they are all the same.
|
|
104
|
+
:return: The return value retrieved by calling attr_getter on each invoice item, if all are the same (or if
|
|
105
|
+
only one item is present), or None if no items are present or if any items disagree with each other.
|
|
106
|
+
"""
|
|
107
|
+
# Get a list of terminals from the invoice line items, where the terminal is not blank
|
|
108
|
+
item_fields = list(map(attr_getter, invoice.items))
|
|
109
|
+
if len(set(item_fields)) == 1:
|
|
110
|
+
return item_fields[0]
|
|
111
|
+
return None
|
|
112
|
+
|
|
113
|
+
def convert_dtn_invoice(self, invoice: DTNSupplierInvoice) -> SDSupplierInvoiceCreateRequest:
|
|
114
|
+
deferred_tax_details = []
|
|
115
|
+
summary_tax_details = []
|
|
116
|
+
if invoice.deferred_taxes is not None:
|
|
117
|
+
deferred_tax_details.extend([
|
|
118
|
+
self.convert_dtn_invoice_deferred_tax_item(detail_item)
|
|
119
|
+
for detail_item in invoice.deferred_taxes.items
|
|
120
|
+
])
|
|
121
|
+
for summary_tax in invoice.summary_taxes:
|
|
122
|
+
summary_tax_details.append(
|
|
123
|
+
self.convert_dtn_invoice_summary_tax(summary_tax)
|
|
124
|
+
)
|
|
125
|
+
return SDSupplierInvoiceCreateRequest(
|
|
126
|
+
invoice_number=invoice.header.invoice_number,
|
|
127
|
+
source_name=invoice.header.sold_to_name,
|
|
128
|
+
supplier=invoice.header.seller_name,
|
|
129
|
+
terminal=self.resolve_field(invoice, attrgetter("ship_from_name")) or None,
|
|
130
|
+
due_date_local=invoice.header.invoice_due_date,
|
|
131
|
+
# Convert date to datetime at midnight
|
|
132
|
+
invoice_date_local=datetime.combine(invoice.header.invoice_date, datetime.min.time()),
|
|
133
|
+
# Get a flattened list, since each call to convert_dtn_invoice_item yields a list of 1 or more detail items
|
|
134
|
+
# Then tack on summary and deferred tax details, when applicable.
|
|
135
|
+
details=list(itertools.chain.from_iterable(
|
|
136
|
+
[self.convert_dtn_invoice_item(item) for item in invoice.items]
|
|
137
|
+
)) + summary_tax_details + deferred_tax_details,
|
|
138
|
+
extra_data={
|
|
139
|
+
"invoice_filename": invoice.original_filename
|
|
140
|
+
} if invoice.original_filename else {},
|
|
141
|
+
ship_to_city=self.resolve_field(invoice, attrgetter("ship_to_city")) or None,
|
|
142
|
+
ship_to_state=self.resolve_field(invoice, attrgetter("ship_to_state")) or None,
|
|
143
|
+
ship_from_city=self.resolve_field(invoice, attrgetter("ship_from_city")) or None,
|
|
144
|
+
ship_from_state=self.resolve_field(invoice, attrgetter("ship_from_state")) or None,
|
|
145
|
+
)
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
from io import TextIOWrapper
|
|
2
|
+
from loguru import logger
|
|
3
|
+
from typing import Dict, AsyncIterable
|
|
4
|
+
|
|
5
|
+
from bb_integrations_lib.models.dtn_supplier_invoice import DTNSupplierInvoice, Parser
|
|
6
|
+
from bb_integrations_lib.protocols.pipelines import GeneratorStep
|
|
7
|
+
from bb_integrations_lib.shared.model import RawData
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class ParseDTNInvoiceStep(GeneratorStep):
    """Generator step that parses a raw DTN supplier invoice file into one or
    more DTNSupplierInvoice models."""

    def __init__(self, skip_failed: bool = True, *args, **kwargs) -> None:
        """
        Parse a DTN invoice and convert it into one or more modeled invoice items.

        :param skip_failed: Whether to skip any invoices in this file that fail to parse.
        """
        super().__init__(*args, **kwargs)
        self.skip_failed = skip_failed

    def describe(self) -> str:
        return "Parse a DTN supplier invoice file into one or more invoice models"

    async def generator(self, i: RawData) -> AsyncIterable[DTNSupplierInvoice]:
        """Yield each invoice parsed from the raw file, tagged with the
        originating file name.

        :param i: Raw file data; i.data is assumed to be a seekable binary
            stream of UTF-8 text (e.g. BytesIO) — TODO confirm against callers.
        :raises Exception: Re-raises the parse error when skip_failed is False;
            otherwise the failure is logged and the file is skipped.
        """
        try:
            tio = TextIOWrapper(i.data, encoding="utf-8")
            try:
                tio.seek(0)
                parsed = Parser().parse(tio)
            finally:
                # Don't allow the CSV module to close the underlying BytesIO in
                # case another step wants to use it. Detaching in a finally
                # block guarantees the buffer survives even if parsing raises
                # (previously a failed parse left the wrapper attached, and its
                # GC would close the buffer).
                tio.detach()
            for invoice in parsed:
                invoice.original_filename = i.file_name
                yield invoice
        except Exception:
            if self.skip_failed:
                logger.exception(f"Failed to process file {i.file_name}, skipping")
                return
            # Bare raise preserves the original traceback (raise e would
            # re-point it at this line).
            raise
|