bb-integrations-library 3.0.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bb_integrations_lib/__init__.py +0 -0
- bb_integrations_lib/converters/__init__.py +0 -0
- bb_integrations_lib/gravitate/__init__.py +0 -0
- bb_integrations_lib/gravitate/base_api.py +20 -0
- bb_integrations_lib/gravitate/model.py +29 -0
- bb_integrations_lib/gravitate/pe_api.py +122 -0
- bb_integrations_lib/gravitate/rita_api.py +552 -0
- bb_integrations_lib/gravitate/sd_api.py +572 -0
- bb_integrations_lib/gravitate/testing/TTE/sd/models.py +1398 -0
- bb_integrations_lib/gravitate/testing/TTE/sd/tests/test_models.py +2987 -0
- bb_integrations_lib/gravitate/testing/__init__.py +0 -0
- bb_integrations_lib/gravitate/testing/builder.py +55 -0
- bb_integrations_lib/gravitate/testing/openapi.py +70 -0
- bb_integrations_lib/gravitate/testing/util.py +274 -0
- bb_integrations_lib/mappers/__init__.py +0 -0
- bb_integrations_lib/mappers/prices/__init__.py +0 -0
- bb_integrations_lib/mappers/prices/model.py +106 -0
- bb_integrations_lib/mappers/prices/price_mapper.py +127 -0
- bb_integrations_lib/mappers/prices/protocol.py +20 -0
- bb_integrations_lib/mappers/prices/util.py +61 -0
- bb_integrations_lib/mappers/rita_mapper.py +523 -0
- bb_integrations_lib/models/__init__.py +0 -0
- bb_integrations_lib/models/dtn_supplier_invoice.py +487 -0
- bb_integrations_lib/models/enums.py +28 -0
- bb_integrations_lib/models/pipeline_structs.py +76 -0
- bb_integrations_lib/models/probe/probe_event.py +20 -0
- bb_integrations_lib/models/probe/request_data.py +431 -0
- bb_integrations_lib/models/probe/resume_token.py +7 -0
- bb_integrations_lib/models/rita/audit.py +113 -0
- bb_integrations_lib/models/rita/auth.py +30 -0
- bb_integrations_lib/models/rita/bucket.py +17 -0
- bb_integrations_lib/models/rita/config.py +188 -0
- bb_integrations_lib/models/rita/constants.py +19 -0
- bb_integrations_lib/models/rita/crossroads_entities.py +293 -0
- bb_integrations_lib/models/rita/crossroads_mapping.py +428 -0
- bb_integrations_lib/models/rita/crossroads_monitoring.py +78 -0
- bb_integrations_lib/models/rita/crossroads_network.py +41 -0
- bb_integrations_lib/models/rita/crossroads_rules.py +80 -0
- bb_integrations_lib/models/rita/email.py +39 -0
- bb_integrations_lib/models/rita/issue.py +63 -0
- bb_integrations_lib/models/rita/mapping.py +227 -0
- bb_integrations_lib/models/rita/probe.py +58 -0
- bb_integrations_lib/models/rita/reference_data.py +110 -0
- bb_integrations_lib/models/rita/source_system.py +9 -0
- bb_integrations_lib/models/rita/workers.py +76 -0
- bb_integrations_lib/models/sd/bols_and_drops.py +241 -0
- bb_integrations_lib/models/sd/get_order.py +301 -0
- bb_integrations_lib/models/sd/orders.py +18 -0
- bb_integrations_lib/models/sd_api.py +115 -0
- bb_integrations_lib/pipelines/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/order_by_site_product_parser.py +50 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/tank_configs_parser.py +47 -0
- bb_integrations_lib/pipelines/parsers/dtn/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/dtn/dtn_price_parser.py +102 -0
- bb_integrations_lib/pipelines/parsers/dtn/model.py +79 -0
- bb_integrations_lib/pipelines/parsers/price_engine/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/price_engine/parse_accessorials_prices_parser.py +67 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_merge_parser.py +111 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_sync_parser.py +107 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/shared.py +81 -0
- bb_integrations_lib/pipelines/parsers/tank_reading_parser.py +155 -0
- bb_integrations_lib/pipelines/parsers/tank_sales_parser.py +144 -0
- bb_integrations_lib/pipelines/shared/__init__.py +0 -0
- bb_integrations_lib/pipelines/shared/allocation_matching.py +227 -0
- bb_integrations_lib/pipelines/shared/bol_allocation.py +2793 -0
- bb_integrations_lib/pipelines/steps/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/create_accessorials_step.py +80 -0
- bb_integrations_lib/pipelines/steps/distribution_report/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/distribution_report/distribution_report_datafram_to_raw_data.py +33 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_model_history_step.py +50 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_order_by_site_product_step.py +62 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_tank_configs_step.py +40 -0
- bb_integrations_lib/pipelines/steps/distribution_report/join_distribution_order_dos_step.py +85 -0
- bb_integrations_lib/pipelines/steps/distribution_report/upload_distribution_report_datafram_to_big_query.py +47 -0
- bb_integrations_lib/pipelines/steps/echo_step.py +14 -0
- bb_integrations_lib/pipelines/steps/export_dataframe_to_rawdata_step.py +28 -0
- bb_integrations_lib/pipelines/steps/exporting/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/exporting/bbd_export_payroll_file_step.py +107 -0
- bb_integrations_lib/pipelines/steps/exporting/bbd_export_readings_step.py +236 -0
- bb_integrations_lib/pipelines/steps/exporting/cargas_wholesale_bundle_upload_step.py +33 -0
- bb_integrations_lib/pipelines/steps/exporting/dataframe_flat_file_export.py +29 -0
- bb_integrations_lib/pipelines/steps/exporting/gcs_bucket_export_file_step.py +34 -0
- bb_integrations_lib/pipelines/steps/exporting/keyvu_export_step.py +356 -0
- bb_integrations_lib/pipelines/steps/exporting/pe_price_export_step.py +238 -0
- bb_integrations_lib/pipelines/steps/exporting/platform_science_order_sync_step.py +500 -0
- bb_integrations_lib/pipelines/steps/exporting/save_rawdata_to_disk.py +15 -0
- bb_integrations_lib/pipelines/steps/exporting/sftp_export_file_step.py +60 -0
- bb_integrations_lib/pipelines/steps/exporting/sftp_export_many_files_step.py +23 -0
- bb_integrations_lib/pipelines/steps/exporting/update_exported_orders_table_step.py +64 -0
- bb_integrations_lib/pipelines/steps/filter_step.py +22 -0
- bb_integrations_lib/pipelines/steps/get_latest_sync_date.py +34 -0
- bb_integrations_lib/pipelines/steps/importing/bbd_import_payroll_step.py +30 -0
- bb_integrations_lib/pipelines/steps/importing/get_order_numbers_to_export_step.py +138 -0
- bb_integrations_lib/pipelines/steps/importing/load_file_to_dataframe_step.py +46 -0
- bb_integrations_lib/pipelines/steps/importing/load_imap_attachment_step.py +172 -0
- bb_integrations_lib/pipelines/steps/importing/pe_bulk_sync_price_structure_step.py +68 -0
- bb_integrations_lib/pipelines/steps/importing/pe_price_merge_step.py +86 -0
- bb_integrations_lib/pipelines/steps/importing/sftp_file_config_step.py +124 -0
- bb_integrations_lib/pipelines/steps/importing/test_exact_file_match.py +57 -0
- bb_integrations_lib/pipelines/steps/null_step.py +15 -0
- bb_integrations_lib/pipelines/steps/pe_integration_job_step.py +32 -0
- bb_integrations_lib/pipelines/steps/processing/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/processing/archive_gcs_step.py +76 -0
- bb_integrations_lib/pipelines/steps/processing/archive_sftp_step.py +48 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_format_tank_readings_step.py +492 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_prices_step.py +54 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_tank_sales_step.py +124 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_tankreading_step.py +80 -0
- bb_integrations_lib/pipelines/steps/processing/convert_bbd_order_to_cargas_step.py +226 -0
- bb_integrations_lib/pipelines/steps/processing/delete_sftp_step.py +33 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/__init__.py +2 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/convert_dtn_invoice_to_sd_model.py +145 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/parse_dtn_invoice_step.py +38 -0
- bb_integrations_lib/pipelines/steps/processing/file_config_parser_step.py +720 -0
- bb_integrations_lib/pipelines/steps/processing/file_config_parser_step_v2.py +418 -0
- bb_integrations_lib/pipelines/steps/processing/get_sd_price_price_request.py +105 -0
- bb_integrations_lib/pipelines/steps/processing/keyvu_upload_deliveryplan_step.py +39 -0
- bb_integrations_lib/pipelines/steps/processing/mark_orders_exported_in_bbd_step.py +185 -0
- bb_integrations_lib/pipelines/steps/processing/pe_price_rows_processing_step.py +174 -0
- bb_integrations_lib/pipelines/steps/processing/send_process_report_step.py +47 -0
- bb_integrations_lib/pipelines/steps/processing/sftp_renamer_step.py +61 -0
- bb_integrations_lib/pipelines/steps/processing/tank_reading_touchup_steps.py +75 -0
- bb_integrations_lib/pipelines/steps/processing/upload_supplier_invoice_step.py +16 -0
- bb_integrations_lib/pipelines/steps/send_attached_in_rita_email_step.py +44 -0
- bb_integrations_lib/pipelines/steps/send_rita_email_step.py +34 -0
- bb_integrations_lib/pipelines/steps/sleep_step.py +24 -0
- bb_integrations_lib/pipelines/wrappers/__init__.py +0 -0
- bb_integrations_lib/pipelines/wrappers/accessorials_transformation.py +104 -0
- bb_integrations_lib/pipelines/wrappers/distribution_report.py +191 -0
- bb_integrations_lib/pipelines/wrappers/export_tank_readings.py +237 -0
- bb_integrations_lib/pipelines/wrappers/import_tank_readings.py +192 -0
- bb_integrations_lib/pipelines/wrappers/wrapper.py +81 -0
- bb_integrations_lib/protocols/__init__.py +0 -0
- bb_integrations_lib/protocols/flat_file.py +210 -0
- bb_integrations_lib/protocols/gravitate_client.py +104 -0
- bb_integrations_lib/protocols/pipelines.py +697 -0
- bb_integrations_lib/provider/__init__.py +0 -0
- bb_integrations_lib/provider/api/__init__.py +0 -0
- bb_integrations_lib/provider/api/cargas/__init__.py +0 -0
- bb_integrations_lib/provider/api/cargas/client.py +43 -0
- bb_integrations_lib/provider/api/cargas/model.py +49 -0
- bb_integrations_lib/provider/api/cargas/protocol.py +23 -0
- bb_integrations_lib/provider/api/dtn/__init__.py +0 -0
- bb_integrations_lib/provider/api/dtn/client.py +128 -0
- bb_integrations_lib/provider/api/dtn/protocol.py +9 -0
- bb_integrations_lib/provider/api/keyvu/__init__.py +0 -0
- bb_integrations_lib/provider/api/keyvu/client.py +30 -0
- bb_integrations_lib/provider/api/keyvu/model.py +149 -0
- bb_integrations_lib/provider/api/macropoint/__init__.py +0 -0
- bb_integrations_lib/provider/api/macropoint/client.py +28 -0
- bb_integrations_lib/provider/api/macropoint/model.py +40 -0
- bb_integrations_lib/provider/api/pc_miler/__init__.py +0 -0
- bb_integrations_lib/provider/api/pc_miler/client.py +130 -0
- bb_integrations_lib/provider/api/pc_miler/model.py +6 -0
- bb_integrations_lib/provider/api/pc_miler/web_services_apis.py +131 -0
- bb_integrations_lib/provider/api/platform_science/__init__.py +0 -0
- bb_integrations_lib/provider/api/platform_science/client.py +147 -0
- bb_integrations_lib/provider/api/platform_science/model.py +82 -0
- bb_integrations_lib/provider/api/quicktrip/__init__.py +0 -0
- bb_integrations_lib/provider/api/quicktrip/client.py +52 -0
- bb_integrations_lib/provider/api/telapoint/__init__.py +0 -0
- bb_integrations_lib/provider/api/telapoint/client.py +68 -0
- bb_integrations_lib/provider/api/telapoint/model.py +178 -0
- bb_integrations_lib/provider/api/warren_rogers/__init__.py +0 -0
- bb_integrations_lib/provider/api/warren_rogers/client.py +207 -0
- bb_integrations_lib/provider/aws/__init__.py +0 -0
- bb_integrations_lib/provider/aws/s3/__init__.py +0 -0
- bb_integrations_lib/provider/aws/s3/client.py +126 -0
- bb_integrations_lib/provider/ftp/__init__.py +0 -0
- bb_integrations_lib/provider/ftp/client.py +140 -0
- bb_integrations_lib/provider/ftp/interface.py +273 -0
- bb_integrations_lib/provider/ftp/model.py +76 -0
- bb_integrations_lib/provider/imap/__init__.py +0 -0
- bb_integrations_lib/provider/imap/client.py +228 -0
- bb_integrations_lib/provider/imap/model.py +3 -0
- bb_integrations_lib/provider/sqlserver/__init__.py +0 -0
- bb_integrations_lib/provider/sqlserver/client.py +106 -0
- bb_integrations_lib/secrets/__init__.py +4 -0
- bb_integrations_lib/secrets/adapters.py +98 -0
- bb_integrations_lib/secrets/credential_models.py +222 -0
- bb_integrations_lib/secrets/factory.py +85 -0
- bb_integrations_lib/secrets/providers.py +160 -0
- bb_integrations_lib/shared/__init__.py +0 -0
- bb_integrations_lib/shared/exceptions.py +25 -0
- bb_integrations_lib/shared/model.py +1039 -0
- bb_integrations_lib/shared/shared_enums.py +510 -0
- bb_integrations_lib/storage/README.md +236 -0
- bb_integrations_lib/storage/__init__.py +0 -0
- bb_integrations_lib/storage/aws/__init__.py +0 -0
- bb_integrations_lib/storage/aws/s3.py +8 -0
- bb_integrations_lib/storage/defaults.py +72 -0
- bb_integrations_lib/storage/gcs/__init__.py +0 -0
- bb_integrations_lib/storage/gcs/client.py +8 -0
- bb_integrations_lib/storage/gcsmanager/__init__.py +0 -0
- bb_integrations_lib/storage/gcsmanager/client.py +8 -0
- bb_integrations_lib/storage/setup.py +29 -0
- bb_integrations_lib/util/__init__.py +0 -0
- bb_integrations_lib/util/cache/__init__.py +0 -0
- bb_integrations_lib/util/cache/custom_ttl_cache.py +75 -0
- bb_integrations_lib/util/cache/protocol.py +9 -0
- bb_integrations_lib/util/config/__init__.py +0 -0
- bb_integrations_lib/util/config/manager.py +391 -0
- bb_integrations_lib/util/config/model.py +41 -0
- bb_integrations_lib/util/exception_logger/__init__.py +0 -0
- bb_integrations_lib/util/exception_logger/exception_logger.py +146 -0
- bb_integrations_lib/util/exception_logger/test.py +114 -0
- bb_integrations_lib/util/utils.py +364 -0
- bb_integrations_lib/workers/__init__.py +0 -0
- bb_integrations_lib/workers/groups.py +13 -0
- bb_integrations_lib/workers/rpc_worker.py +50 -0
- bb_integrations_lib/workers/topics.py +20 -0
- bb_integrations_library-3.0.11.dist-info/METADATA +59 -0
- bb_integrations_library-3.0.11.dist-info/RECORD +217 -0
- bb_integrations_library-3.0.11.dist-info/WHEEL +4 -0
bb_integrations_lib/pipelines/parsers/distribution_report/order_by_site_product_parser.py
@@ -0,0 +1,50 @@

from io import StringIO
import numpy as np
import pandas as pd
from bb_integrations_lib.protocols.pipelines import Parser
from typing import override
from pandas import DataFrame


class OrderBySiteProductParser(Parser):
    def __init__(self, tenant_name: str, source_system: str | None = None):
        super().__init__(source_system, tenant_name)

    def __repr__(self) -> str:
        return "Order by site and product parser"

    @override
    async def parse(self, data: str, mapping_type: str | None = None) -> DataFrame:
        orders = pd.read_csv(StringIO(data))
        orders_columns = ['order_number', 'site', 'site_name', 'component_product', 'finished_product',
                          'component_volume', 'contract', 'market']
        orders = orders[orders_columns]
        orders['contract_type'] = np.where(orders['contract'].isna(), 'rack', 'contract')
        grouped_sum = orders.groupby(['site', 'component_product', 'contract_type', 'market'])[
            'component_volume'].sum().reset_index()

        pivot_df = grouped_sum.pivot_table(
            index=['site', 'component_product'],
            values=['component_volume'],
            columns=['contract_type'],
            aggfunc='sum',
            fill_value=0
        )
        pivot_df.columns = ['_'.join(col).strip() if isinstance(col, tuple) else col for col in pivot_df.columns]
        pivot_df['component_volume_contract'] = pivot_df.get('component_volume_contract', 0)
        pivot_df['component_volume_rack'] = pivot_df.get('component_volume_rack', 0)
        pivot_df['total_product'] = pivot_df['component_volume_contract'] + pivot_df['component_volume_rack']
        pivot_df['pct_contract'] = pivot_df['component_volume_contract'] / pivot_df['total_product']
        pivot_df['pct_rack'] = pivot_df['component_volume_rack'] / pivot_df['total_product']
        pivot_df = pivot_df.reset_index()
        pivot_df['component_product'] = pivot_df['component_product'].apply(OrderBySiteProductParser.normalize_product)
        pivot_df['site'] = pivot_df['site'].astype(str)
        return pivot_df

    @staticmethod
    def normalize_product(val):
        try:
            return str(int(float(val)))
        except (ValueError, TypeError):
            return str(val).strip()
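For orientation, a standalone sketch (toy data, not part of the package) of the contract/rack split and pivot that parse performs:

import numpy as np
import pandas as pd

# 'contract' is empty for rack volume; the parser classifies on that.
orders = pd.DataFrame({
    'site': ['S1', 'S1', 'S1'],
    'component_product': [87, 87, 91],
    'contract': ['C-100', None, None],
    'component_volume': [6000.0, 2000.0, 8000.0],
})
orders['contract_type'] = np.where(orders['contract'].isna(), 'rack', 'contract')
pivot = orders.pivot_table(index=['site', 'component_product'],
                           values='component_volume',
                           columns='contract_type', aggfunc='sum', fill_value=0)
print(pivot)
# contract_type        contract    rack
# S1   87                6000.0  2000.0   -> 75% contract, 25% rack
# S1   91                   0.0  8000.0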
bb_integrations_lib/pipelines/parsers/distribution_report/tank_configs_parser.py
@@ -0,0 +1,47 @@

import numpy as np
import pandas as pd
from bb_integrations_lib.protocols.pipelines import Parser
from typing import override, Dict, List
from pandas import DataFrame


class TankConfigsParser(Parser):
    def __init__(self, tenant_name: str, source_system: str | None = None):
        super().__init__(source_system, tenant_name)

    def __repr__(self) -> str:
        return "Tank configs parser"

    @override
    async def parse(self, data: List[Dict], mapping_type: str | None = None) -> DataFrame:
        tc_df = pd.DataFrame(data)
        dos_columns = ['store_number', 'product', 'daily_lifting_estimate', 'measured_inventory']
        tc_df = tc_df[dos_columns]

        tc_df = tc_df.groupby(['store_number', 'product'])[
            ['daily_lifting_estimate', 'measured_inventory']].sum().reset_index()
        tc_df['dos'] = np.where(
            (tc_df['measured_inventory'] == 0) | (tc_df['daily_lifting_estimate'] == 0),
            'N/A',
            tc_df['measured_inventory'] / tc_df['daily_lifting_estimate']
        )
        tc_df['dos'] = pd.to_numeric(tc_df['dos'], errors='coerce')
        tc_df['dos_bucket'] = np.where(
            tc_df['dos'] <= 2, '0-2',
            np.where(tc_df['dos'] <= 4, '2-4',
                     np.where(tc_df['dos'] <= 6, '4-6',
                              np.where(tc_df['dos'] <= 8, '6-8',
                                       np.where(tc_df['dos'] <= 10, '8-10',
                                                np.where(tc_df['dos'].isna(), 'N/A', '10+')))))
        )
        tc_df['product'] = tc_df['product'].apply(TankConfigsParser.normalize_product)
        tc_df['store_number'] = tc_df['store_number'].astype(str)

        return tc_df

    @staticmethod
    def normalize_product(val):
        try:
            return str(int(float(val)))
        except (ValueError, TypeError):
            return str(val).strip()
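One detail worth noting: np.where first writes the string 'N/A' into an otherwise numeric days-of-supply column, and pd.to_numeric(..., errors='coerce') turns it back into NaN, which fails every <= comparison and so falls through to the final isna() branch of the bucket chain. A standalone sketch of that round-trip:

import numpy as np
import pandas as pd

# 'N/A' becomes NaN under errors='coerce'; NaN <= 4 is False, so only the
# explicit isna() branch catches it.
dos = pd.to_numeric(pd.Series([3.5, 'N/A']), errors='coerce')
print(np.where(dos <= 4, '2-4', np.where(dos.isna(), 'N/A', '10+')))  # ['2-4' 'N/A']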
bb_integrations_lib/pipelines/parsers/dtn/__init__.py
File without changes
bb_integrations_lib/pipelines/parsers/dtn/dtn_price_parser.py
@@ -0,0 +1,102 @@

from typing import AsyncGenerator, Optional
from typing import override

from bb_integrations_lib.gravitate.sd_api import GravitateSDAPI
from bb_integrations_lib.pipelines.parsers.dtn.model import DTNIntegrationConfig, DTNPriceRecord
from bb_integrations_lib.shared.exceptions import MappingNotFoundException
from loguru import logger

from bb_integrations_lib.mappers.rita_mapper import RitaMapper, AsyncMappingProvider
from bb_integrations_lib.models.rita.mapping import MappingType
from bb_integrations_lib.protocols.pipelines import Parser
from bb_integrations_lib.shared.model import MappingMode, SupplyPriceUpdateManyRequest
from bb_integrations_lib.util.utils import lookup
from pydantic import ValidationError


class DTNPriceParser(Parser):
    def __init__(
            self,
            source_system: str | None = None,
            mapping_provider: Optional[AsyncMappingProvider] = None,
            sd_client: GravitateSDAPI = None,
            price_config: DTNIntegrationConfig = None,
            **args
    ):
        super().__init__(source_system, mapping_provider)
        self.mapper: Optional[RitaMapper] = None
        self.sd_client = sd_client
        self.price_config = price_config
        self.supply_owner_configs_lkp_by_gravitate_id = lookup(self.price_config.supply_owners,
                                                               lambda x: x.gravitate_id)

    @override
    async def parse(self, data: list[dict], mapping_type: MappingMode | None = None) -> AsyncGenerator[
            SupplyPriceUpdateManyRequest, None]:
        if mapping_type is None:
            logger.warning("DTNPriceParser.parse mapping_type is None, defaulting to skip")
            mapping_type = MappingMode.skip
        self.mapper = await self.load_mapper()
        preparsed_records = self.preparse(data, mapping_type)
        for rec in preparsed_records:
            with logger.catch(message=f"Skipped record {rec} due to error"):
                supply_owner_config = self.supply_owner_configs_lkp_by_gravitate_id.get(rec.supply_owner)
                extend_by_days = supply_owner_config.extend_by_days if supply_owner_config else 3
                expire_in_hours = supply_owner_config.expire_in_hours if supply_owner_config else 24
                yield SupplyPriceUpdateManyRequest(
                    source_id=rec.source_id,
                    source_system_id=rec.source_system,
                    terminal_id=rec.terminal,
                    product_id=rec.product,
                    supplier_id=rec.supplier,
                    effective_from=rec.effective_from_date,
                    effective_to=rec.add_days(extend_by_days),
                    price=rec.price,
                    price_type=rec.price_type,
                    timezone=None,  # always needs to be None
                    contract=rec.contract,
                    counterparty_id=rec.supply_owner,
                    expire=rec.add_hours(expire_in_hours),
                )

    async def get_store_lkp(self):
        stores = await self.sd_client.all_stores()
        return lookup(stores.json(), lambda x: x.get("store_number"))

    def try_to_map_composite_record(self, record: dict):
        try:
            return self.mapper.get_gravitate_id_by_composite(record, MappingType.composite)
        except KeyError as e:
            raise MappingNotFoundException(f"Unable to find matching record for record {record}") from e

    def preparse(self, records: list[dict], mapping_type: MappingMode) -> list[DTNPriceRecord]:
        parsed_records = []
        mapping_failures = []
        validation_failures = []
        for translated in records:
            try:
                record = DTNPriceRecord(**translated)
                mapped_record = self.try_to_map_composite_record(record.map_key)
                mapped_record_key = {k.replace(" ", "_"): v for k, v in mapped_record.key.items()}
                updated_mapped_record = record.model_copy(update=mapped_record_key)
                parsed_records.append(updated_mapped_record)
            except ValidationError as e:
                validation_failures.append({
                    "record": translated,
                    "error": str(e),
                })
                logger.warning(f"Skipped invalid record {translated}: {e}")
                continue
            except MappingNotFoundException as mnfe:
                mapping_failures.append({
                    "record": translated,
                    "error": str(mnfe),
                })
                logger.warning(f"Skipped record due to mapping not found {translated}: {mnfe}")
                continue
        self.logs = {
            "validation_failures": validation_failures,
            "mapping_failures": mapping_failures,
            "successful_records": len(parsed_records),
        }
        return parsed_records
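A note on the error handling in parse: loguru's logger.catch, used as a context manager, logs the exception with the supplied message and suppresses it (reraise defaults to False), so one bad record is skipped without ending the generator. A minimal sketch of the pattern:

from loguru import logger

# The second iteration raises AttributeError; logger.catch logs it with the
# message and swallows it, and the loop continues.
for rec in ("ok", None):
    with logger.catch(message=f"Skipped record {rec} due to error"):
        print(rec.upper())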
bb_integrations_lib/pipelines/parsers/dtn/model.py
@@ -0,0 +1,79 @@

import math
from datetime import datetime, timedelta

from bb_integrations_lib.shared.model import ConfigMatchMode, ConfigMode, MappingMode
from pydantic import BaseModel, field_validator, Field
from dateutil.parser import parse
from bb_integrations_lib.shared.shared_enums import PriceType


class SupplyOwnerConfig(BaseModel):
    gravitate_id: str
    extend_by_days: int = 3
    expire_in_hours: int = 24


class DTNIntegrationConfig(BaseModel):
    config_name: str
    ftp_credentials: str
    file_match_mode: ConfigMatchMode = ConfigMatchMode.Partial
    config_match_mode: ConfigMode = ConfigMode.ByName
    mapping_mode: MappingMode = MappingMode.full
    supply_owners: list[SupplyOwnerConfig]


class DTNPriceRecord(BaseModel):
    supplier: str
    terminal: str
    product: str
    price: float
    source_system: str = "DTN"
    effective_from_date: datetime
    brand: str | None = None
    supply_owner: str | None = Field(default=None, alias="supply owner")
    price_type: PriceType | None = Field(default=PriceType.rack, alias="price type")
    contract: str | None = None
    model_config = {'extra': 'ignore', 'populate_by_name': True}

    @field_validator('supplier', 'terminal', 'product', 'price', mode='before')
    @classmethod
    def reject_nan(cls, v, info):
        if v is None:
            raise ValueError(f'{info.field_name} cannot be None')
        if isinstance(v, str) and v.lower() == 'nan':
            raise ValueError(f'{info.field_name} cannot be NaN')
        if isinstance(v, float) and math.isnan(v):
            raise ValueError(f'{info.field_name} cannot be NaN')
        return v

    @field_validator('effective_from_date', mode='before')
    @classmethod
    def parse_date(cls, v, info):
        if isinstance(v, str):
            try:
                return parse(v)
            except (ValueError, TypeError):
                raise ValueError(f'{info.field_name} must be a valid datetime string')
        return v

    @property
    def source_id(self) -> str:
        parts = [self.supplier, self.terminal, self.product, self.source_system, self.supply_owner]
        if self.brand:
            parts.append(self.brand)
        return "|".join(parts)

    @property
    def map_key(self):
        return {
            "supplier": self.supplier,
            "terminal": self.terminal,
            "product": self.product,
            "brand": self.brand,
        }

    def add_days(self, days: int) -> datetime:
        """Return effective_from_date plus the specified days."""
        return self.effective_from_date + timedelta(days=days)

    def add_hours(self, hours: int) -> datetime:
        """Return effective_from_date plus the specified hours."""
        return self.effective_from_date + timedelta(hours=hours)
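A standalone sketch (simplified fields, not the package model) of the two validation ideas above: rejecting NaN-ish values before pydantic coerces them, and accepting the spaced "supply owner" alias used by the feed:

import math
from pydantic import BaseModel, Field, field_validator

class Row(BaseModel):
    supplier: str
    price: float
    supply_owner: str | None = Field(default=None, alias="supply owner")
    model_config = {'populate_by_name': True}

    @field_validator('supplier', 'price', mode='before')
    @classmethod
    def reject_nan(cls, v, info):
        # Runs before coercion, so a float('nan') from a DataFrame row is
        # rejected instead of silently becoming the string 'nan'.
        if v is None or (isinstance(v, float) and math.isnan(v)):
            raise ValueError(f'{info.field_name} cannot be NaN')
        return v

print(Row(**{"supplier": "S1", "price": "2.31", "supply owner": "O1"}).price)  # 2.31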
bb_integrations_lib/pipelines/parsers/price_engine/__init__.py
File without changes
bb_integrations_lib/pipelines/parsers/price_engine/parse_accessorials_prices_parser.py
@@ -0,0 +1,67 @@

from datetime import datetime, timedelta
from typing import List, override

import pytz
from bb_integrations_lib.models.rita.mapping import MappingType
from bb_integrations_lib.protocols.pipelines import Parser
from bb_integrations_lib.shared.model import PEPriceData
from loguru import logger


class AccessorialPricesParser(Parser):
    def __init__(self, tenant_name: str, source_system: str | None = None, timezone: str = "UTC"):
        self.tz = timezone
        super().__init__(tenant_name=tenant_name, source_system=source_system)

    def __repr__(self) -> str:
        return "Parse spot prices from rows to Sync Price Rows in Pricing Engine."

    @override
    async def parse(self, data: List[PEPriceData], mapping_type: MappingType | None = None) -> list[dict]:
        mapper = await self.load_mapper()
        mapping_failures = []
        translation_failures = []
        parsed_rows: List[dict] = []
        processed_assessorial_ids = set()
        latest_only = list(filter(lambda x: x.Rank == 1, data))
        for translated_row in latest_only:
            try:
                local_tz = pytz.timezone(self.tz)
                effective_from = datetime.now(local_tz).replace(
                    hour=0, minute=0, second=0, microsecond=0
                ) + timedelta(days=1)
                price_instrument_id = translated_row.PriceInstrumentId
                accessorial_id = mapper.get_gravitate_parent_id(str(price_instrument_id), MappingType.other)
                price = translated_row.CurvePointPrices[0].Value
                rate = mapper.get_gravitate_parent_id(str(accessorial_id), MappingType.other)
                row = {
                    "accessorial_id": accessorial_id,
                    "effective_from_date": effective_from.isoformat(),
                    "rate": str(AccessorialPricesParser.apply_rate(rate, price))
                }
                if accessorial_id not in processed_assessorial_ids:
                    parsed_rows.append(row)
                    processed_assessorial_ids.add(accessorial_id)
            except KeyError as e:
                logger.warning(f"Failed to parse row {translated_row} due to mapping issue {e}")
                mapping_failures.append(translated_row)
                continue
            except Exception as e:
                logger.warning(f"Failed to parse row {translated_row} due to {e}")
                translation_failures.append(translated_row)
                continue
        return parsed_rows

    @staticmethod
    def apply_rate(rate, value) -> float:
        try:
            float_value = float(value)
        except (ValueError, TypeError):
            raise ValueError(f"Invalid value: {value}")
        match rate.lower():
            case "positive":
                return float_value
            case "negative":
                return float_value * -1
            case _:
                raise NotImplementedError(f"Unsupported rate: {rate}")
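apply_rate consumes the sign meaning that the second mapper lookup returns for the accessorial. Assuming the wheel is installed, the convention is easy to check directly, since the method is a staticmethod:

from bb_integrations_lib.pipelines.parsers.price_engine.parse_accessorials_prices_parser import AccessorialPricesParser

print(AccessorialPricesParser.apply_rate("Positive", "0.05"))  # 0.05
print(AccessorialPricesParser.apply_rate("negative", 0.05))   # -0.05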
bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/__init__.py
File without changes
bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_merge_parser.py
@@ -0,0 +1,111 @@

from typing import Dict, List, override, AsyncGenerator
from dateutil.parser import parse
from loguru import logger

from bb_integrations_lib.models.rita.mapping import MappingType
from bb_integrations_lib.pipelines.parsers.price_engine.price_file_upload.shared import PriceSyncParser
from bb_integrations_lib.shared.model import MappingMode

from bb_integrations_lib.protocols.flat_file import PELookup, \
    PriceMergeIntegrationDTO, PriceMergeValue, PePriceMergeIntegration


class PricesMergeParser(PriceSyncParser):
    def __init__(self, tenant_name: str, source_system: str | None = None):
        super().__init__(tenant_name, source_system)

    def __repr__(self) -> str:
        return "Parse prices from rows to Merge Price Rows in Pricing Engine."

    @override
    async def parse(self, data: List[Dict], mapping_type: MappingMode | None = None) -> AsyncGenerator[
            PePriceMergeIntegration, None]:
        mapper = await self.load_mapper()
        mapping_failures = []
        translation_failures = []
        dtos: List[PriceMergeIntegrationDTO] = []
        for translated_row in data:
            try:
                row_is_rvp = PricesMergeParser.is_rvp(
                    translated_row.get('RVP', '0.0'))  # This will always be false if RVP does not exist
                price_publisher_name = translated_row['price_publisher']
                configuration = translated_row['configuration']
                supplier_key = translated_row['supplier_key']
                location_key = translated_row['location_key']
                product_key = translated_row['product_key']
                location_name = translated_row.get('location_name', None)
                product_name = translated_row.get('product_name', None)
                supplier_name = translated_row.get('supplier_name', None)
                price_factor = translated_row.get('price_factor')
                source_system_id = translated_row['source_system_id']
                effective_from_str = translated_row['date']
                effective_from_hrs_override = translated_row.get('effective_from_hrs_override', None)
                effective_from_mins_override = translated_row.get('effective_from_mins_override', None)
                effective_from = PricesMergeParser.get_effective_from_date(effective_from_str,
                                                                           effective_from_hrs_override,
                                                                           effective_from_mins_override)
                effective_to_str = translated_row.get('effective_to', None)
                effective_to_hrs_override = translated_row.get('effective_to_hrs_override', None)
                effective_to_mins_override = translated_row.get('effective_to_mins_override', None)
                effective_to = PricesMergeParser.get_effective_to_date(effective_to_str,
                                                                       effective_to_hrs_override,
                                                                       effective_to_mins_override)
                price = translated_row['price']

                if row_is_rvp and "RVP" not in product_key:
                    product_key = PricesMergeParser.format_rvp_product(product_key, translated_row.get('RVP'))

                if price_factor is not None:
                    price = float(price) / int(price_factor)

                if mapping_type == MappingMode.full:
                    supplier_source_id = mapper.get_gravitate_child_id(source_parent_id=configuration,
                                                                       source_child_id=supplier_key,
                                                                       mapping_type=MappingType.counterparty)
                    location_source_id = mapper.get_gravitate_child_id(source_parent_id=configuration,
                                                                       source_child_id=location_key,
                                                                       mapping_type=MappingType.terminal)
                    product_source_id = mapper.get_gravitate_child_id(source_parent_id=configuration,
                                                                      source_child_id=product_key,
                                                                      mapping_type=MappingType.product)
                elif mapping_type == MappingMode.skip:
                    supplier_source_id = supplier_key
                    location_source_id = location_key
                    product_source_id = product_key
                else:
                    raise ValueError(f"Unsupported mapping type: {mapping_type}")
                if product_name is not None and location_name is not None and supplier_name is not None:
                    price_instrument_name = f"{product_name} @ {location_name} - {supplier_name}"
                else:
                    price_instrument_name = f"{product_source_id} @ {location_source_id} - {supplier_source_id}"
                price_instrument_source_string_id = f"{price_publisher_name} - {price_instrument_name}"
                dtos.append(PriceMergeIntegrationDTO(
                    PriceInstrumentLookup=PELookup(
                        SourceIdString=price_instrument_source_string_id,
                        SourceSystemId=int(source_system_id)
                    ),
                    EffectiveFromDateTime=effective_from,
                    EffectiveToDateTime=effective_to,  # Effective to date is optional
                    PriceValues=[
                        PriceMergeValue(
                            Value=float(price),
                        )
                    ]
                ))
            except (KeyError, ValueError) as e:
                mapping_failures.append(translated_row)
                logger.warning(f"Skipped record {translated_row} due to Key Error or Value Error: {e}")
                continue
            except Exception as uh:
                translation_failures.append(translated_row)
                logger.warning(f"Skipped record {translated_row} due to unhandled exception: {uh.args}")
                continue
        if dtos and price_publisher_name and source_system_id:
            yield PePriceMergeIntegration(
                IntegrationDtos=dtos,
                SourceSystemId=int(source_system_id)
            )
        else:
            logger.warning("No valid records were processed to create an integration")
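The optional price_factor divides the raw value before the DTO is built, so feeds that publish in scaled units can be normalized. A hypothetical illustration (the unit choice is an assumption, not from the package): a feed quoting tenths of a cent could ship price_factor 1000:

# Hypothetical: 2315 tenths-of-a-cent with factor 1000 -> 2.315 dollars.
price, price_factor = "2315", "1000"
print(float(price) / int(price_factor))  # 2.315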
bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_sync_parser.py
@@ -0,0 +1,107 @@

from typing import Dict, List, override, AsyncGenerator

from loguru import logger

from bb_integrations_lib.models.rita.mapping import MappingType
from bb_integrations_lib.pipelines.parsers.price_engine.price_file_upload.shared import PriceSyncParser
from bb_integrations_lib.shared.model import MappingMode
from bb_integrations_lib.protocols.flat_file import PriceInstrumentDTO, PELookup, PeBulkSyncIntegration, \
    BulkSyncIntegrationDTO, PriceTypeDTO


class PricesSyncParser(PriceSyncParser):
    def __init__(self, tenant_name: str, source_system: str | None = None):
        super().__init__(tenant_name=tenant_name, source_system=source_system)

    def __repr__(self) -> str:
        return "Parse prices from rows to Sync Price Rows in Pricing Engine."

    @override
    async def parse(self, data: List[Dict], mapping_type: MappingMode | None = None) -> AsyncGenerator[
            PeBulkSyncIntegration, None]:
        mapper = await self.load_mapper()
        mapping_failures = []
        translation_failures = []
        dtos: List[PriceInstrumentDTO] = []
        for translated_row in data:
            try:
                row_is_rvp = PricesSyncParser.is_rvp(translated_row.get('RVP', '0.0'))
                price_publisher_name = translated_row['price_publisher']
                configuration = translated_row['configuration']
                supplier_key = translated_row['supplier_key']
                location_key = translated_row['location_key']
                product_key = translated_row['product_key']
                posting_type = translated_row.get('posting_type', "Posting")
                location_name = translated_row.get('location_name', None)
                product_name = translated_row.get('product_name', None)
                supplier_name = translated_row.get('supplier_name', None)
                if row_is_rvp and "RVP" not in product_key:
                    product_key = PricesSyncParser.format_rvp_product(product_key, translated_row['RVP'])
                source_system_id = translated_row['source_system_id']

                if mapping_type == MappingMode.full:
                    supplier_source_id = mapper.get_gravitate_child_id(source_parent_id=configuration,
                                                                       source_child_id=supplier_key,
                                                                       mapping_type=MappingType.counterparty)
                    location_source_id = mapper.get_gravitate_child_id(source_parent_id=configuration,
                                                                       source_child_id=location_key,
                                                                       mapping_type=MappingType.terminal)
                    product_source_id = mapper.get_gravitate_child_id(source_parent_id=configuration,
                                                                      source_child_id=product_key,
                                                                      mapping_type=MappingType.product)
                elif mapping_type == MappingMode.skip:
                    supplier_source_id = supplier_key
                    location_source_id = location_key
                    product_source_id = product_key
                else:
                    raise ValueError(f"Unsupported mapping type: {mapping_type}")
                if product_name is not None and location_name is not None and supplier_name is not None:
                    price_instrument_name = f"{product_name} @ {location_name} - {supplier_name}"
                else:
                    price_instrument_name = f"{product_source_id} @ {location_source_id} - {supplier_source_id}"
                price_instrument_source_string_id = f"{price_publisher_name} - {price_instrument_name}"
                dtos.append(PriceInstrumentDTO(
                    Name=price_instrument_name,
                    Abbreviation=price_instrument_name,
                    SourceIdString=price_instrument_source_string_id,
                    ProductLookup=PELookup(
                        SourceIdString=product_source_id,
                        SourceSystemId=int(source_system_id)
                    ),
                    LocationLookup=PELookup(
                        SourceIdString=location_source_id,
                        SourceSystemId=int(source_system_id)
                    ),
                    CounterPartyLookup=PELookup(
                        SourceIdString=supplier_source_id,
                        SourceSystemId=int(source_system_id)
                    )
                ))
            except (KeyError, ValueError) as e:
                mapping_failures.append(translated_row)
                logger.warning(f"Skipped record {translated_row} due to Key Error or Value Error: {e}")
                continue
            except Exception as uh:
                translation_failures.append(translated_row)
                logger.warning(f"Skipped record {translated_row} due to unhandled exception: {uh.args}")
                continue
        if dtos and price_publisher_name and source_system_id:
            yield PeBulkSyncIntegration(
                IntegrationDtos=[
                    BulkSyncIntegrationDTO(
                        Name=price_publisher_name,
                        Abbreviation=price_publisher_name,
                        SourceIdString=price_publisher_name,
                        PriceInstrumentDTOs=dtos,
                        PriceTypeDTOs=[
                            PriceTypeDTO(
                                PriceTypeMeaning=posting_type
                            )
                        ]
                    )
                ],
                SourceSystemId=int(source_system_id)
            )
        else:
            logger.warning("No valid records were processed to create an integration")
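Read together, the two subclasses appear to split a Pricing Engine upload into two passes: PricesSyncParser bulk-creates the price instrument definitions (product, location, and counterparty lookups plus a PriceTypeDTO), while PricesMergeParser posts price values against existing instruments via PriceInstrumentLookup. Both build the same "{publisher} - {product} @ {location} - {supplier}" SourceIdString, so merge rows resolve to the instruments a prior sync pass created.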
bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/shared.py
@@ -0,0 +1,81 @@

from typing import List, Dict, Any

from dateutil.parser import parse

from bb_integrations_lib.protocols.pipelines import Parser
from bb_integrations_lib.shared.model import MappingMode


class PriceSyncParser(Parser):
    def __init__(self, tenant_name: str, source_system: str | None = None):
        super().__init__(tenant_name=tenant_name, source_system=source_system)

    def __repr__(self) -> str:
        return "Parse prices from rows to Sync Price Rows in Pricing Engine."

    async def parse(self, data: List[Dict], mapping_type: MappingMode | None = None) -> Any:
        pass

    @staticmethod
    def is_rvp(rvp: str) -> bool:
        try:
            return float(rvp) > 0.0
        except (ValueError, TypeError):
            return False

    @staticmethod
    def format_rvp_product(product_key: str, rvp: str | None) -> str:
        if not rvp:
            return product_key
        rvp_str = str(rvp)
        if product_key.endswith(rvp_str):
            product_key = product_key[:-len(rvp_str)]
            product_key = product_key.rstrip('.')
        return f"{product_key}{float(rvp_str)}"

    @staticmethod
    def get_effective_to_date(effective_to_str: str | None,
                              effective_to_hrs_override: str | None,
                              effective_to_minutes_override: str | None) -> str | None:
        return PriceSyncParser.get_date_override(effective_to_str, effective_to_hrs_override,
                                                 effective_to_minutes_override)

    @staticmethod
    def get_effective_from_date(effective_from_str: str | None,
                                effective_from_hrs_override: str | None,
                                effective_from_minutes_override: str | None) -> str | None:
        return PriceSyncParser.get_date_override(effective_from_str, effective_from_hrs_override,
                                                 effective_from_minutes_override)

    @staticmethod
    def get_date_override(date_str: str | None,
                          date_to_hrs_override: str | None,
                          date_to_minutes_override: str | None) -> str | None:
        if not date_str:
            return date_str

        parsed_date = parse(date_str)

        if date_to_hrs_override and date_to_minutes_override:
            return parsed_date.replace(
                hour=int(date_to_hrs_override),
                minute=int(date_to_minutes_override),
                second=0
            ).isoformat()
        elif date_to_hrs_override:
            return parsed_date.replace(
                hour=int(date_to_hrs_override),
                minute=0,
                second=0
            ).isoformat()
        elif date_to_minutes_override:
            return parsed_date.replace(
                minute=int(date_to_minutes_override),
                second=0
            ).isoformat()
        else:
            return date_str
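Assuming the wheel is installed, these helpers can be exercised directly, since they are staticmethods and need no constructed parser:

from bb_integrations_lib.pipelines.parsers.price_engine.price_file_upload.shared import PriceSyncParser

print(PriceSyncParser.is_rvp("9.0"))           # True
print(PriceSyncParser.is_rvp("not a number"))  # False (non-numeric RVP never triggers the rewrite)
print(PriceSyncParser.get_date_override("2024-05-01", "18", None))
# '2024-05-01T18:00:00' -- hour override applied, minutes zeroed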