bb-integrations-library 3.0.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bb_integrations_lib/__init__.py +0 -0
- bb_integrations_lib/converters/__init__.py +0 -0
- bb_integrations_lib/gravitate/__init__.py +0 -0
- bb_integrations_lib/gravitate/base_api.py +20 -0
- bb_integrations_lib/gravitate/model.py +29 -0
- bb_integrations_lib/gravitate/pe_api.py +122 -0
- bb_integrations_lib/gravitate/rita_api.py +552 -0
- bb_integrations_lib/gravitate/sd_api.py +572 -0
- bb_integrations_lib/gravitate/testing/TTE/sd/models.py +1398 -0
- bb_integrations_lib/gravitate/testing/TTE/sd/tests/test_models.py +2987 -0
- bb_integrations_lib/gravitate/testing/__init__.py +0 -0
- bb_integrations_lib/gravitate/testing/builder.py +55 -0
- bb_integrations_lib/gravitate/testing/openapi.py +70 -0
- bb_integrations_lib/gravitate/testing/util.py +274 -0
- bb_integrations_lib/mappers/__init__.py +0 -0
- bb_integrations_lib/mappers/prices/__init__.py +0 -0
- bb_integrations_lib/mappers/prices/model.py +106 -0
- bb_integrations_lib/mappers/prices/price_mapper.py +127 -0
- bb_integrations_lib/mappers/prices/protocol.py +20 -0
- bb_integrations_lib/mappers/prices/util.py +61 -0
- bb_integrations_lib/mappers/rita_mapper.py +523 -0
- bb_integrations_lib/models/__init__.py +0 -0
- bb_integrations_lib/models/dtn_supplier_invoice.py +487 -0
- bb_integrations_lib/models/enums.py +28 -0
- bb_integrations_lib/models/pipeline_structs.py +76 -0
- bb_integrations_lib/models/probe/probe_event.py +20 -0
- bb_integrations_lib/models/probe/request_data.py +431 -0
- bb_integrations_lib/models/probe/resume_token.py +7 -0
- bb_integrations_lib/models/rita/audit.py +113 -0
- bb_integrations_lib/models/rita/auth.py +30 -0
- bb_integrations_lib/models/rita/bucket.py +17 -0
- bb_integrations_lib/models/rita/config.py +188 -0
- bb_integrations_lib/models/rita/constants.py +19 -0
- bb_integrations_lib/models/rita/crossroads_entities.py +293 -0
- bb_integrations_lib/models/rita/crossroads_mapping.py +428 -0
- bb_integrations_lib/models/rita/crossroads_monitoring.py +78 -0
- bb_integrations_lib/models/rita/crossroads_network.py +41 -0
- bb_integrations_lib/models/rita/crossroads_rules.py +80 -0
- bb_integrations_lib/models/rita/email.py +39 -0
- bb_integrations_lib/models/rita/issue.py +63 -0
- bb_integrations_lib/models/rita/mapping.py +227 -0
- bb_integrations_lib/models/rita/probe.py +58 -0
- bb_integrations_lib/models/rita/reference_data.py +110 -0
- bb_integrations_lib/models/rita/source_system.py +9 -0
- bb_integrations_lib/models/rita/workers.py +76 -0
- bb_integrations_lib/models/sd/bols_and_drops.py +241 -0
- bb_integrations_lib/models/sd/get_order.py +301 -0
- bb_integrations_lib/models/sd/orders.py +18 -0
- bb_integrations_lib/models/sd_api.py +115 -0
- bb_integrations_lib/pipelines/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/order_by_site_product_parser.py +50 -0
- bb_integrations_lib/pipelines/parsers/distribution_report/tank_configs_parser.py +47 -0
- bb_integrations_lib/pipelines/parsers/dtn/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/dtn/dtn_price_parser.py +102 -0
- bb_integrations_lib/pipelines/parsers/dtn/model.py +79 -0
- bb_integrations_lib/pipelines/parsers/price_engine/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/price_engine/parse_accessorials_prices_parser.py +67 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/__init__.py +0 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_merge_parser.py +111 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/price_sync_parser.py +107 -0
- bb_integrations_lib/pipelines/parsers/price_engine/price_file_upload/shared.py +81 -0
- bb_integrations_lib/pipelines/parsers/tank_reading_parser.py +155 -0
- bb_integrations_lib/pipelines/parsers/tank_sales_parser.py +144 -0
- bb_integrations_lib/pipelines/shared/__init__.py +0 -0
- bb_integrations_lib/pipelines/shared/allocation_matching.py +227 -0
- bb_integrations_lib/pipelines/shared/bol_allocation.py +2793 -0
- bb_integrations_lib/pipelines/steps/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/create_accessorials_step.py +80 -0
- bb_integrations_lib/pipelines/steps/distribution_report/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/distribution_report/distribution_report_datafram_to_raw_data.py +33 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_model_history_step.py +50 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_order_by_site_product_step.py +62 -0
- bb_integrations_lib/pipelines/steps/distribution_report/get_tank_configs_step.py +40 -0
- bb_integrations_lib/pipelines/steps/distribution_report/join_distribution_order_dos_step.py +85 -0
- bb_integrations_lib/pipelines/steps/distribution_report/upload_distribution_report_datafram_to_big_query.py +47 -0
- bb_integrations_lib/pipelines/steps/echo_step.py +14 -0
- bb_integrations_lib/pipelines/steps/export_dataframe_to_rawdata_step.py +28 -0
- bb_integrations_lib/pipelines/steps/exporting/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/exporting/bbd_export_payroll_file_step.py +107 -0
- bb_integrations_lib/pipelines/steps/exporting/bbd_export_readings_step.py +236 -0
- bb_integrations_lib/pipelines/steps/exporting/cargas_wholesale_bundle_upload_step.py +33 -0
- bb_integrations_lib/pipelines/steps/exporting/dataframe_flat_file_export.py +29 -0
- bb_integrations_lib/pipelines/steps/exporting/gcs_bucket_export_file_step.py +34 -0
- bb_integrations_lib/pipelines/steps/exporting/keyvu_export_step.py +356 -0
- bb_integrations_lib/pipelines/steps/exporting/pe_price_export_step.py +238 -0
- bb_integrations_lib/pipelines/steps/exporting/platform_science_order_sync_step.py +500 -0
- bb_integrations_lib/pipelines/steps/exporting/save_rawdata_to_disk.py +15 -0
- bb_integrations_lib/pipelines/steps/exporting/sftp_export_file_step.py +60 -0
- bb_integrations_lib/pipelines/steps/exporting/sftp_export_many_files_step.py +23 -0
- bb_integrations_lib/pipelines/steps/exporting/update_exported_orders_table_step.py +64 -0
- bb_integrations_lib/pipelines/steps/filter_step.py +22 -0
- bb_integrations_lib/pipelines/steps/get_latest_sync_date.py +34 -0
- bb_integrations_lib/pipelines/steps/importing/bbd_import_payroll_step.py +30 -0
- bb_integrations_lib/pipelines/steps/importing/get_order_numbers_to_export_step.py +138 -0
- bb_integrations_lib/pipelines/steps/importing/load_file_to_dataframe_step.py +46 -0
- bb_integrations_lib/pipelines/steps/importing/load_imap_attachment_step.py +172 -0
- bb_integrations_lib/pipelines/steps/importing/pe_bulk_sync_price_structure_step.py +68 -0
- bb_integrations_lib/pipelines/steps/importing/pe_price_merge_step.py +86 -0
- bb_integrations_lib/pipelines/steps/importing/sftp_file_config_step.py +124 -0
- bb_integrations_lib/pipelines/steps/importing/test_exact_file_match.py +57 -0
- bb_integrations_lib/pipelines/steps/null_step.py +15 -0
- bb_integrations_lib/pipelines/steps/pe_integration_job_step.py +32 -0
- bb_integrations_lib/pipelines/steps/processing/__init__.py +0 -0
- bb_integrations_lib/pipelines/steps/processing/archive_gcs_step.py +76 -0
- bb_integrations_lib/pipelines/steps/processing/archive_sftp_step.py +48 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_format_tank_readings_step.py +492 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_prices_step.py +54 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_tank_sales_step.py +124 -0
- bb_integrations_lib/pipelines/steps/processing/bbd_upload_tankreading_step.py +80 -0
- bb_integrations_lib/pipelines/steps/processing/convert_bbd_order_to_cargas_step.py +226 -0
- bb_integrations_lib/pipelines/steps/processing/delete_sftp_step.py +33 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/__init__.py +2 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/convert_dtn_invoice_to_sd_model.py +145 -0
- bb_integrations_lib/pipelines/steps/processing/dtn/parse_dtn_invoice_step.py +38 -0
- bb_integrations_lib/pipelines/steps/processing/file_config_parser_step.py +720 -0
- bb_integrations_lib/pipelines/steps/processing/file_config_parser_step_v2.py +418 -0
- bb_integrations_lib/pipelines/steps/processing/get_sd_price_price_request.py +105 -0
- bb_integrations_lib/pipelines/steps/processing/keyvu_upload_deliveryplan_step.py +39 -0
- bb_integrations_lib/pipelines/steps/processing/mark_orders_exported_in_bbd_step.py +185 -0
- bb_integrations_lib/pipelines/steps/processing/pe_price_rows_processing_step.py +174 -0
- bb_integrations_lib/pipelines/steps/processing/send_process_report_step.py +47 -0
- bb_integrations_lib/pipelines/steps/processing/sftp_renamer_step.py +61 -0
- bb_integrations_lib/pipelines/steps/processing/tank_reading_touchup_steps.py +75 -0
- bb_integrations_lib/pipelines/steps/processing/upload_supplier_invoice_step.py +16 -0
- bb_integrations_lib/pipelines/steps/send_attached_in_rita_email_step.py +44 -0
- bb_integrations_lib/pipelines/steps/send_rita_email_step.py +34 -0
- bb_integrations_lib/pipelines/steps/sleep_step.py +24 -0
- bb_integrations_lib/pipelines/wrappers/__init__.py +0 -0
- bb_integrations_lib/pipelines/wrappers/accessorials_transformation.py +104 -0
- bb_integrations_lib/pipelines/wrappers/distribution_report.py +191 -0
- bb_integrations_lib/pipelines/wrappers/export_tank_readings.py +237 -0
- bb_integrations_lib/pipelines/wrappers/import_tank_readings.py +192 -0
- bb_integrations_lib/pipelines/wrappers/wrapper.py +81 -0
- bb_integrations_lib/protocols/__init__.py +0 -0
- bb_integrations_lib/protocols/flat_file.py +210 -0
- bb_integrations_lib/protocols/gravitate_client.py +104 -0
- bb_integrations_lib/protocols/pipelines.py +697 -0
- bb_integrations_lib/provider/__init__.py +0 -0
- bb_integrations_lib/provider/api/__init__.py +0 -0
- bb_integrations_lib/provider/api/cargas/__init__.py +0 -0
- bb_integrations_lib/provider/api/cargas/client.py +43 -0
- bb_integrations_lib/provider/api/cargas/model.py +49 -0
- bb_integrations_lib/provider/api/cargas/protocol.py +23 -0
- bb_integrations_lib/provider/api/dtn/__init__.py +0 -0
- bb_integrations_lib/provider/api/dtn/client.py +128 -0
- bb_integrations_lib/provider/api/dtn/protocol.py +9 -0
- bb_integrations_lib/provider/api/keyvu/__init__.py +0 -0
- bb_integrations_lib/provider/api/keyvu/client.py +30 -0
- bb_integrations_lib/provider/api/keyvu/model.py +149 -0
- bb_integrations_lib/provider/api/macropoint/__init__.py +0 -0
- bb_integrations_lib/provider/api/macropoint/client.py +28 -0
- bb_integrations_lib/provider/api/macropoint/model.py +40 -0
- bb_integrations_lib/provider/api/pc_miler/__init__.py +0 -0
- bb_integrations_lib/provider/api/pc_miler/client.py +130 -0
- bb_integrations_lib/provider/api/pc_miler/model.py +6 -0
- bb_integrations_lib/provider/api/pc_miler/web_services_apis.py +131 -0
- bb_integrations_lib/provider/api/platform_science/__init__.py +0 -0
- bb_integrations_lib/provider/api/platform_science/client.py +147 -0
- bb_integrations_lib/provider/api/platform_science/model.py +82 -0
- bb_integrations_lib/provider/api/quicktrip/__init__.py +0 -0
- bb_integrations_lib/provider/api/quicktrip/client.py +52 -0
- bb_integrations_lib/provider/api/telapoint/__init__.py +0 -0
- bb_integrations_lib/provider/api/telapoint/client.py +68 -0
- bb_integrations_lib/provider/api/telapoint/model.py +178 -0
- bb_integrations_lib/provider/api/warren_rogers/__init__.py +0 -0
- bb_integrations_lib/provider/api/warren_rogers/client.py +207 -0
- bb_integrations_lib/provider/aws/__init__.py +0 -0
- bb_integrations_lib/provider/aws/s3/__init__.py +0 -0
- bb_integrations_lib/provider/aws/s3/client.py +126 -0
- bb_integrations_lib/provider/ftp/__init__.py +0 -0
- bb_integrations_lib/provider/ftp/client.py +140 -0
- bb_integrations_lib/provider/ftp/interface.py +273 -0
- bb_integrations_lib/provider/ftp/model.py +76 -0
- bb_integrations_lib/provider/imap/__init__.py +0 -0
- bb_integrations_lib/provider/imap/client.py +228 -0
- bb_integrations_lib/provider/imap/model.py +3 -0
- bb_integrations_lib/provider/sqlserver/__init__.py +0 -0
- bb_integrations_lib/provider/sqlserver/client.py +106 -0
- bb_integrations_lib/secrets/__init__.py +4 -0
- bb_integrations_lib/secrets/adapters.py +98 -0
- bb_integrations_lib/secrets/credential_models.py +222 -0
- bb_integrations_lib/secrets/factory.py +85 -0
- bb_integrations_lib/secrets/providers.py +160 -0
- bb_integrations_lib/shared/__init__.py +0 -0
- bb_integrations_lib/shared/exceptions.py +25 -0
- bb_integrations_lib/shared/model.py +1039 -0
- bb_integrations_lib/shared/shared_enums.py +510 -0
- bb_integrations_lib/storage/README.md +236 -0
- bb_integrations_lib/storage/__init__.py +0 -0
- bb_integrations_lib/storage/aws/__init__.py +0 -0
- bb_integrations_lib/storage/aws/s3.py +8 -0
- bb_integrations_lib/storage/defaults.py +72 -0
- bb_integrations_lib/storage/gcs/__init__.py +0 -0
- bb_integrations_lib/storage/gcs/client.py +8 -0
- bb_integrations_lib/storage/gcsmanager/__init__.py +0 -0
- bb_integrations_lib/storage/gcsmanager/client.py +8 -0
- bb_integrations_lib/storage/setup.py +29 -0
- bb_integrations_lib/util/__init__.py +0 -0
- bb_integrations_lib/util/cache/__init__.py +0 -0
- bb_integrations_lib/util/cache/custom_ttl_cache.py +75 -0
- bb_integrations_lib/util/cache/protocol.py +9 -0
- bb_integrations_lib/util/config/__init__.py +0 -0
- bb_integrations_lib/util/config/manager.py +391 -0
- bb_integrations_lib/util/config/model.py +41 -0
- bb_integrations_lib/util/exception_logger/__init__.py +0 -0
- bb_integrations_lib/util/exception_logger/exception_logger.py +146 -0
- bb_integrations_lib/util/exception_logger/test.py +114 -0
- bb_integrations_lib/util/utils.py +364 -0
- bb_integrations_lib/workers/__init__.py +0 -0
- bb_integrations_lib/workers/groups.py +13 -0
- bb_integrations_lib/workers/rpc_worker.py +50 -0
- bb_integrations_lib/workers/topics.py +20 -0
- bb_integrations_library-3.0.11.dist-info/METADATA +59 -0
- bb_integrations_library-3.0.11.dist-info/RECORD +217 -0
- bb_integrations_library-3.0.11.dist-info/WHEEL +4 -0
|
@@ -0,0 +1,1039 @@
|
|
|
1
|
+
import os
|
|
2
|
+
from datetime import datetime, date
|
|
3
|
+
from enum import Enum, StrEnum
|
|
4
|
+
from io import BytesIO
|
|
5
|
+
from typing import Any, Optional, List, Literal, Union
|
|
6
|
+
from typing import Self
|
|
7
|
+
import re
|
|
8
|
+
import pandas as pd
|
|
9
|
+
from pandas import DataFrame
|
|
10
|
+
from pydantic import BaseModel, ConfigDict, constr, field_validator, PrivateAttr, \
|
|
11
|
+
model_validator, field_serializer, Field
|
|
12
|
+
|
|
13
|
+
from bb_integrations_lib.models.rita.audit import ProcessReportBase
|
|
14
|
+
from bb_integrations_lib.models.rita.config import FileConfig
|
|
15
|
+
|
|
16
|
+
from bb_integrations_lib.shared.shared_enums import TimezoneEnum, PriceType, timezone_to_canonical_name
|
|
17
|
+
|
|
18
|
+
class MappingMode(str, Enum):
    """How RITA mappings are applied to rows during file processing."""

    # NOTE: the original members carried trailing commas (e.g. `full = "full",`),
    # making each value a one-element tuple. With a str mixin the tuple is
    # unpacked into str() so the values were unchanged, but with a plain Enum
    # that comma silently changes the value — removed to eliminate the hazard.
    full = "full"  # Every row must be mapped. If a row doesn't have a mapping, it will not be uploaded.
    partial = "partial"  # Use a mapping if available, but otherwise use the raw value from the file.
    skip = "skip"  # No row is expected to have a map. Mappings will not be used.
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class ConfigMode(Enum):
    """How file configurations are looked up in RITA for SFTP file processing.

    The three modes trade breadth for precision: a whole bucket, a single
    named config, or a name-filtered view across all buckets.
    """

    FromBucket = "FromBucket"
    """Load every fileconfig stored in the RITA bucket named by the `bucket_name` parameter."""

    ByName = "ByName"
    """Load exactly one file config, identified by the `bucket_name` and `config_name` parameters."""

    AllFiltered = "AllFiltered"
    """Load all configs regardless of bucket, then keep only those whose names appear in the
    `config_names` step parameter."""
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
class ConfigMatchMode(Enum):
    """Strategies for matching incoming file names against a config's pattern.

    A file is picked up when its name satisfies the chosen rule: exact
    equality, substring containment, or extension equality.
    """

    Exact = "Exact"
    """The file name must equal the pattern exactly."""

    Partial = "Partial"
    """The file name must contain the pattern as a substring."""

    ByExtension = "ByExtension"
    """The file's extension must equal the configured extension."""
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
class RawData(BaseModel):
    """A named payload of file data moving through a pipeline."""

    file_name: str
    data: Any
    # When True, downstream steps may treat an empty payload as acceptable.
    empty_ok: bool = False

    @property
    def is_empty(self) -> bool:
        """Report whether `data` holds nothing.

        None, zero-length str/bytes, an empty DataFrame, and any other
        zero-length sized object all count as empty; objects without a
        length are never considered empty.
        """
        payload = self.data
        if payload is None:
            return True
        if isinstance(payload, pd.DataFrame):
            return payload.empty
        if isinstance(payload, (str, bytes)):
            return not payload
        if hasattr(payload, "__len__"):
            return len(payload) == 0
        return False
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
class FileConfigRawData(RawData):
    """RawData paired with the RITA FileConfig that describes how to parse it."""
    # Presumably a backup of the original data buffer, taken before parsing
    # consumes/mutates `data` — TODO confirm against the parser steps.
    data_buffer_bkp: Any = None
    # The RITA file configuration associated with this payload.
    file_config: FileConfig
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
class FileType(str, Enum):
    """File formats a FileReference can point at."""

    excel = "excel"
    csv = "csv"
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
class FileReference:
    """A reference to a file object on the filesystem for use in the bb_integrations_lib.storage API"""

    # TODO: See if this can be rolled into RawData
    def __init__(self, file_path: str, file_type: FileType, sheet_name: str | None = None):
        """Create a reference to a file on disk.

        Args:
            file_path: Path to the file on the local filesystem.
            file_type: Format of the referenced file (excel or csv).
            sheet_name: Worksheet to use when the file is an Excel workbook.
                Defaults to None.  (Annotation fixed from `str = None` to
                `str | None = None`; the default was always None.)
        """
        self.file_path = file_path
        self.file_type = file_type
        self.sheet_name = sheet_name

    def get_filename(self) -> str:
        """Return just the file's base name, without directory components."""
        return os.path.basename(self.file_path)

    @property
    def is_empty(self) -> bool:
        """True when the file is missing from disk or has zero size."""
        if not os.path.exists(self.file_path):
            return True
        return os.path.getsize(self.file_path) == 0
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
class CredentialType(str, Enum):
    """
    Enumeration of credential file types for different integrations.

    Attributes:
        ftp (str): Credential file for FTP connections.
        aws (str): Credential file for AWS connections.
        google (str): Credential file for Google integrations.
        imap (str): Credential file for IMAP mailbox connections.
    """
    # Fix: the docstring previously omitted the `imap` member.
    ftp = 'ftp.credentials'
    aws = 'aws.credentials'
    google = 'google.credentials'
    imap = 'imap.credentials'
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
class File(BaseModel):
    """
    Model representing a file with data and metadata.

    Attributes:
        file_name (str | None): Name of the file, without extension.
        file_data (Any): The file content (e.g. str, bytes, DataFrame).
        content_type (str): MIME type of the file. Defaults to empty string.
        is_public (bool): Whether the file should be publicly accessible. Defaults to False.
        file_extension (str): File extension. Defaults to 'csv'.
        check_if_exists (bool): Whether to check for file existence before uploading. Defaults to True.
    """
    file_name: str | None = None
    file_data: Any
    content_type: str = ''
    is_public: bool = False
    file_extension: str = 'csv'
    check_if_exists: bool = True

    class Config:
        arbitrary_types_allowed = True
        ser_json_bytes = "base64"
        val_json_bytes = "base64"

    @classmethod
    def to_bytes(cls, data: Any) -> BytesIO:
        """
        Convert data to a `BytesIO` object for binary file upload.

        A `BytesIO` input is passed through untouched; a `DataFrame` is
        rendered to CSV (no index); `str` is UTF-8 encoded; `bytes` is
        wrapped directly.

        Args:
            data (Any): The data to be converted.

        Returns:
            io.BytesIO: A `BytesIO` object containing the data in binary form.

        Raises:
            ValueError: If data is of an unsupported type.
        """
        if isinstance(data, BytesIO):
            return data
        if isinstance(data, DataFrame):
            return BytesIO(data.to_csv(index=False).encode('utf-8'))
        if isinstance(data, str):
            return BytesIO(data.encode('utf-8'))
        if isinstance(data, bytes):
            return BytesIO(data)
        raise ValueError("Unsupported data type for conversion to bytes.")
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
class FileUpload(BaseModel):
    """
    Result of a file upload operation.

    Attributes:
        message (str): Status or response message from the upload.
        bucket (str | None): The storage bucket where the file was uploaded.
        blob_path (str | None): Path within the bucket or storage where the file is stored.
        file_name (str): Name of the uploaded file.
        file_path (str): Full path of the file on the server.
        file_size (int | None): Size of the file in bytes, if available.
        content_type (str): MIME type of the uploaded file.
        public_url (str | None): URL for public access to the file, if applicable.
    """
    message: str
    bucket: str | None = None
    blob_path: str | None = None
    file_name: str
    file_path: str
    file_size: int | None = None
    content_type: str
    public_url: str | None = None
|
|
193
|
+
|
|
194
|
+
|
|
195
|
+
class OrderType(str, Enum):
    """Kinds of orders that can be requested."""

    regular = "regular"
    backhaul = "backhaul"
|
|
198
|
+
|
|
199
|
+
|
|
200
|
+
class OrderStateGetBolsDrops(str, Enum):
    """Order lifecycle states accepted by the get-BOLs-and-drops request."""

    canceled = "canceled"
    deleted = "deleted"
    open = "open"
    recommended = "recommended"
    accepted = "accepted"
    assigned = "assigned"
    in_progress = "in_progress"
    complete = "complete"
|
|
209
|
+
|
|
210
|
+
|
|
211
|
+
class GetOrderBolsAndDropsRequest(BaseModel):
    """Filter parameters for querying order BOLs and drops.

    Every filter is optional; omitted fields do not constrain the query.
    """

    # Restrict by order date range.
    order_date_start: datetime | None = None
    order_date_end: datetime | None = None
    # Restrict by when the movement was last updated.
    movement_updated_start: datetime | None = None
    movement_updated_end: datetime | None = None
    # Restrict to specific orders.
    order_ids: list[str] | None = None
    order_numbers: list[int] | None = None
    order_states: list[OrderStateGetBolsDrops] | None = None
    order_type: OrderType | None = None
    # Flags controlling the shape of the response.
    include_invalid: bool | None = False
    include_bol_allocation: bool | None = True
|
|
222
|
+
|
|
223
|
+
class DateWindow(BaseModel):
    """A datetime range; either bound may be omitted to leave it open-ended."""

    from_date: datetime | None = None
    to_date: datetime | None = None
|
|
226
|
+
|
|
227
|
+
class GetFreightInvoicesRequest(BaseModel):
    """Filter parameters for fetching freight invoices.

    Every filter is optional; omitted fields do not constrain the query.
    """
    book_type: Literal["Revenue", "Cost"] | None = None
    order_numbers: list[int] | None = None
    order_ids: list[str] | None = None
    invoice_numbers: list[str] | None = None
    status: Literal["open", "sent", "blocked", "hold"] | None = None
    counterparty_name: str | None = None
    counterparty_id: str | None = None
    # Point-in-time filter; exact semantics defined by the API — TODO confirm.
    as_of: datetime | None = None
    # Date-range filter expressed as an open-ended window.
    between: DateWindow | None = None
    exported: bool | None = None
|
|
238
|
+
|
|
239
|
+
|
|
240
|
+
class CreateProcess(ProcessReportBase):
    """Process-report creation payload; extends the base report with its trigger."""
    # What initiated the process — presumably a schedule/event identifier;
    # TODO confirm against the RITA audit API.
    trigger: str
|
|
242
|
+
|
|
243
|
+
|
|
244
|
+
class SourceModel(BaseModel):
    """Base model for entities that carry source-system identifiers."""

    id: str | None = None
    source_id: str | None = None
    source_system_id: str | None = None

    def source_request(self):
        """Build a source-lookup payload from the identifier pair.

        Returns a dict with `source_id` and `source_system_id` when both
        are truthy; otherwise returns None (the original returned None
        implicitly).
        """
        if not (self.source_id and self.source_system_id):
            return None
        return {
            "source_id": self.source_id,
            "source_system_id": self.source_system_id,
        }
|
|
255
|
+
|
|
256
|
+
|
|
257
|
+
class DisabledReason(str, Enum):
    """Reasons a supply price row can be disabled."""

    terminal_maintenance = "Terminal Maintenance"
    terminal_outage = "Terminal Outage"
    supplier_out = "Supplier Out"
    met_allocation_limit = "Met Allocation Limit"
    contract_utilization = "Contract Utilization"

    @classmethod
    def list_of_values(cls) -> list[str]:
        """All member values, in declaration order."""
        return [member.value for member in cls]

    @classmethod
    def string_of_values(cls) -> str:
        """All member values joined into one comma-separated string."""
        return ",".join(cls.list_of_values())
|
|
271
|
+
|
|
272
|
+
|
|
273
|
+
class PriceRow(BaseModel):
    """A single parsed price row from an uploaded price file.

    Identity (hashing and equality) is defined by `primary_keys`, so two
    rows with the same terminal/product/supplier/site/counterparty/contract
    compare equal regardless of price or effective dates.
    """
    # from_attributes allows building a PriceRow from any attribute-bearing object.
    model_config = ConfigDict(from_attributes=True)

    modify: Literal["Ignore", "Update"] = "Ignore"
    terminal: str | None = None
    product: str
    supplier: str
    counterparty: str | None = None
    site: str | None = None
    price: float
    price_type: PriceType
    contract: str | None = ""
    timezone: TimezoneEnum | None = None
    effective_from: datetime
    effective_to: datetime
    disabled: bool | None = False
    disabled_reason: DisabledReason | None = None
    disabled_until: datetime | None = None
    # Row number in the source file; a PrivateAttr so it is excluded from
    # validation and serialization.
    _row_number: int | None = PrivateAttr(None)

    @field_validator("price", mode="before")
    def to_float(cls, v):
        # Coerce numeric-looking inputs to float before validation; None
        # passes through (and would then fail the required-float check).
        return float(v) if v is not None else None

    def __init__(self, **data):
        # NOTE(review): "_row_number" is also forwarded to super().__init__
        # inside `data`; this relies on pydantic tolerating the extra
        # underscore-prefixed kwarg — confirm under the active model config.
        super().__init__(**data)
        self._row_number = int(data["_row_number"]) if "_row_number" in data else None

    @property
    def primary_keys(self) -> dict:
        """Primary Key fields used for detecting duplicate entries"""
        return {
            "terminal": self.terminal,
            "product": self.product,
            "supplier": self.supplier,
            "site": self.site,
            "counterparty": self.counterparty,
            "contract": self.contract,
        }

    def __hash__(self):
        # Hash only the primary-key values so duplicate rows collide.
        return hash(tuple(self.primary_keys.values()))

    def __eq__(self, other):
        # PriceRow-to-PriceRow equality compares primary keys (via the
        # hashes); other types fall back to the superclass comparison.
        if isinstance(other, PriceRow):
            return self.__hash__() == other.__hash__()
        return super().__eq__(other)
|
|
320
|
+
|
|
321
|
+
|
|
322
|
+
class SupplyPriceUpdateResponse(BaseModel):
    """Per-row result of a supply price update.

    Each referenced entity (terminal, product, supplier, counterparty) can
    be identified three ways: a 24-character internal id, a source id plus
    source system id, or a plain name — any of which may be None. `error`
    and `row` tie a failed update back to the originating file row.
    """
    contract: str | None = None

    timezone: TimezoneEnum | None = None
    effective_from: datetime
    effective_to: datetime
    price: float
    price_type: PriceType

    # Terminal reference (internal id / source ids / display name).
    terminal_id: constr(min_length=24, max_length=24) | None = None
    terminal_source_id: str | None = None
    terminal_source_system_id: str | None = None
    terminal: str | None = None

    # Product reference.
    product_id: constr(min_length=24, max_length=24) | None = None
    product_source_id: str | None = None
    product_source_system_id: str | None = None
    product: str | None = None

    # Supplier reference.
    supplier_id: constr(min_length=24, max_length=24) | None = None
    supplier_source_id: str | None = None
    supplier_source_system_id: str | None = None
    supplier: str | None = None

    # Counterparty reference.
    counterparty_id: constr(min_length=24, max_length=24) | None = None
    counterparty_source_id: str | None = None
    counterparty_source_system_id: str | None = None
    counterparty: str | None = None

    enabled: bool = True
    disabled_until: datetime | None = None
    min_quantity: int | None = None
    max_quantity: int | None = None
    curve_id: str | None = None
    # Error message for this row, if the update failed.
    error: str | None = None
    # Row number in the source file that produced this result.
    row: int | None = None

    source_id: str | None = None
    source_system_id: str | None = None

    @field_validator("timezone")
    def validate_timezone(cls, v):
        # Normalize timezone aliases to their canonical name.
        return timezone_to_canonical_name(v)

    @property
    def identifier(self):
        """Tuple key identifying this price: dates, entity ids, contract, type."""
        return (
            self.effective_from,
            self.effective_to,
            self.product_id,
            self.supplier_id,
            self.terminal_id,
            self.counterparty_id,
            self.contract,
            self.price_type,
        )
|
|
378
|
+
|
|
379
|
+
|
|
380
|
+
class SupplyPriceUpdateManyRequest(SourceModel):
|
|
381
|
+
model_config = ConfigDict(from_attributes=True)
|
|
382
|
+
|
|
383
|
+
contract: str | None = None
|
|
384
|
+
|
|
385
|
+
timezone: TimezoneEnum | None = None
|
|
386
|
+
effective_from: datetime
|
|
387
|
+
effective_to: datetime
|
|
388
|
+
price: float
|
|
389
|
+
price_type: PriceType
|
|
390
|
+
|
|
391
|
+
terminal_id: constr(min_length=24, max_length=24) | None = None
|
|
392
|
+
terminal_source_id: str | None = None
|
|
393
|
+
terminal_source_system_id: str | None = None
|
|
394
|
+
terminal: str | None = None
|
|
395
|
+
|
|
396
|
+
product_id: constr(min_length=24, max_length=24) | None = None
|
|
397
|
+
product_source_id: str | None = None
|
|
398
|
+
product_source_system_id: str | None = None
|
|
399
|
+
product: str | None = None
|
|
400
|
+
|
|
401
|
+
supplier_id: constr(min_length=24, max_length=24) | None = None
|
|
402
|
+
supplier_source_id: str | None = None
|
|
403
|
+
supplier_source_system_id: str | None = None
|
|
404
|
+
supplier: str | None = None
|
|
405
|
+
|
|
406
|
+
counterparty_id: constr(min_length=24, max_length=24) | None = None
|
|
407
|
+
counterparty_source_id: str | None = None
|
|
408
|
+
counterparty_source_system_id: str | None = None
|
|
409
|
+
counterparty: str | None = None
|
|
410
|
+
|
|
411
|
+
store_id: constr(min_length=24, max_length=24) | None = None
|
|
412
|
+
store_source_id: str | None = None
|
|
413
|
+
store_source_system_id: str | None = None
|
|
414
|
+
store_number: str | None = None
|
|
415
|
+
|
|
416
|
+
enabled: bool = True
|
|
417
|
+
disabled_until: datetime | None = None
|
|
418
|
+
expire: datetime | None = None
|
|
419
|
+
min_quantity: int | None = None
|
|
420
|
+
max_quantity: int | None = None
|
|
421
|
+
curve_id: str | None = None
|
|
422
|
+
error: str | None = None
|
|
423
|
+
row: int | None = None
|
|
424
|
+
price_publisher: str | None = None
|
|
425
|
+
|
|
426
|
+
@field_validator("min_quantity", "max_quantity", mode="before")
|
|
427
|
+
def quantity_val(cls, v):
|
|
428
|
+
return v if v else None
|
|
429
|
+
|
|
430
|
+
@field_validator("curve_id", mode="before")
|
|
431
|
+
def convert_to_string(cls, v):
|
|
432
|
+
if v is None:
|
|
433
|
+
return v
|
|
434
|
+
return str(v)
|
|
435
|
+
|
|
436
|
+
@field_validator("supplier_source_id", "product_source_id",
                 "store_source_id", "counterparty_source_id",
                 "terminal_source_id",
                 mode="before")
def validate_source_ids(cls, v):
    """Coerce integer source ids to strings; anything else passes through unchanged."""
    return str(v) if isinstance(v, int) else v
@field_validator("timezone")
def validate_timezone(cls, v):
    """Normalize the timezone value via timezone_to_canonical_name.

    NOTE(review): delegates entirely to the imported helper; presumably maps
    aliases/abbreviations to a canonical IANA name — confirm in its module.
    """
    return timezone_to_canonical_name(v)
@classmethod
def from_price_row(cls, row: PriceRow):
    """Build an instance from a PriceRow, then copy its ``site`` into store_number."""
    instance = cls.model_validate(row)
    instance.store_number = row.site
    return instance
@property
def extra_data(self):
    """Source identifiers bundled for downstream payloads."""
    return {
        "source_id": self.source_id,
        "source_system_id": self.source_system_id,
    }
class PriceUpdateResponse(BaseModel):
    """Outcome of a bulk price update: row counts plus the rows that were
    rejected, duplicated, or matched existing data exactly."""
    created: int = 0      # rows newly created by this run
    end_dated: int = 0    # existing rows end-dated by this run
    bad_data: list[SupplyPriceUpdateResponse]     # rows rejected (see each row's `error`)
    duplicates: list[SupplyPriceUpdateResponse]   # rows flagged as duplicates
    exact_match: list[SupplyPriceUpdateResponse]  # rows identical to existing prices
class SDDirectivePriceType(str, Enum):
    """Price type attached to an S&D directive key."""
    rack = "rack"
    contract = "contract"
    index = "index"
    inventory = "inventory"
    spot = "spot"
class SDDirectiveVolumeDist(BaseModel):
    """Share of a directive's volume allocated to one market."""
    market: str     # market name
    percent: float  # allocation percentage for that market
class SDDirectiveContractVolume(BaseModel):
    """Contracted volume applicable on a specific date."""
    applicable_date: date
    volume: float
class SDDirectiveKey(BaseModel):
    """A key definition for a directive.

    Each of product / supplier / terminal must be identified either by its
    plain name field alone, or by the complete (source_id, source_system)
    pair; check_ids rejects supplying both schemes or an incomplete one.
    """
    contract: str | None = None  # optional contract identifier this key applies to
    price_type: SDDirectivePriceType
    product_source_id: str | None = None
    product_source_system: str | None = None
    product: str | None = None
    supplier_source_id: str | None = None
    supplier_source_system: str | None = None
    supplier: str | None = None
    terminal_source_id: str | None = None
    terminal_source_system: str | None = None
    terminal: str | None = None

    @model_validator(mode="after")
    def check_ids(self) -> Self:
        """Enforce exactly one identification scheme per entity.

        Raises:
            ValueError: if an entity has neither its name nor a complete
                (source_id, source_system) pair, or has all three set.
        """
        # NOTE(review): the message says "Supply only one of ..." but the first
        # branch of each pair actually fires when NEITHER identifier is fully
        # supplied; wording is left untouched in case callers match on it.
        if not self.product and (not self.product_source_id or not self.product_source_system):
            raise ValueError("Supply only one of product or (product_source_id, product_source_system)")
        if self.product and self.product_source_id and self.product_source_system:
            raise ValueError("Supply only one of product or (product_source_id, product_source_system)")
        if not self.supplier and (not self.supplier_source_id or not self.supplier_source_system):
            raise ValueError("Supply only one of supplier or (supplier_source_id, supplier_source_system)")
        if self.supplier and self.supplier_source_id and self.supplier_source_system:
            raise ValueError("Supply only one of supplier or (supplier_source_id, supplier_source_system)")
        if not self.terminal and (not self.terminal_source_id or not self.terminal_source_system):
            raise ValueError("Supply only one of terminal or (terminal_source_id, terminal_source_system)")
        if self.terminal and self.terminal_source_id and self.terminal_source_system:
            raise ValueError("Supply only one of terminal or (terminal_source_id, terminal_source_system)")
        return self
class SDDirective(BaseModel):
    """Required info to create a directive in S&D"""
    source_id: str | None = None      # external identifier, if any
    name: str
    keys: List[SDDirectiveKey]        # entity keys this directive applies to
    as_of: datetime                   # effective timestamp
    min: float | None = None          # lower volume bound (shadows builtin `min` as a field name)
    max: float | None = None          # upper volume bound
    volume_distributions: List[SDDirectiveVolumeDist] = []   # pydantic copies list defaults per-instance
    contract_volumes: List[SDDirectiveContractVolume] = []
    daily_percent: float | None = None
    weekly_percent: float | None = None
    monthly_percent: float | None = None
    week_start_day: str | None = None  # NOTE(review): format (name vs index) not evident here — confirm
class SDDirectiveUpdate(BaseModel):
    """Partial update for an existing S&D directive (addressed by curve_id)."""
    curve_id: str
    # The three percents have no default: they are required-but-nullable in
    # pydantic v2, i.e. callers must pass them explicitly (possibly None).
    daily_percent: float | None
    weekly_percent: float | None
    monthly_percent: float | None
    contract_volumes: List[SDDirectiveContractVolume] = []
class SDSupplierInvoiceDetailType(StrEnum):
    """Kind of supplier-invoice detail line: a tax line or a supply (product) line."""
    tax = "tax"
    supply = "supply"
class SDSupplierInvoiceDetail(BaseModel):
    """One detail line of a supplier invoice (tax or supply, per `type`).

    BOL dates come as a UTC/local pair; per the Field descriptions, use
    whichever matches how well the timezone is known.
    """
    bol_number: str | None = None
    bol_date: datetime | None = Field(
        default=None, description="BOL date (UTC). Use bol_date_local if the tz is assumed to be terminal-local.")
    bol_date_local: datetime | None = Field(
        default=None, description="BOL date (local). Use bol_date if the tz is concretely known.")
    type: SDSupplierInvoiceDetailType  # shadows builtin `type` as a field name
    product: dict[str, str] | None = None  # NOTE(review): key schema not visible here — confirm upstream
    # Tax-line fields (expected when type == tax)
    tax_type: str | None = None
    tax_description: str | None = None
    tax_authority: str | None = None
    tax_non_deferred: bool | None = None
    rate: float | None = None
    amount: float | None = None
    total: float  # only required monetary field
    uom: str | None = None
    gross_volume: float | None = None
    net_volume: float | None = None
class SDSupplierReconciliationInvoiceStatus(str, Enum):
    """Reconciliation status of a supplier invoice in S&D."""
    approved = "approved"
    unapproved = "unapproved"
    unmatched = "unmatched"
    void = "void"
    # BUG FIX: was `hold = "void"`. Duplicate Enum values collapse into a
    # single member, so `hold` was a silent alias of `void` and the "hold"
    # state could never be represented. It now carries its own value.
    hold = "hold"
class SDDeliveryReconciliationMatchStatus(str, Enum):
    """Match status of a delivery during reconciliation."""
    approved = "approved"
    matched = "matched"
    unmatched = "unmatched"  # only measures not orders ( WR Volumes)
    voided = "voided"
class ERPStatus(str, Enum):
    """Export status of a record with respect to the downstream ERP."""
    sent = "sent"
    pending = "pending"
    errors = "errors"
    staged = "staged"
class SDGetUnexportedOrdersRequest(BaseModel):
    """Filter for fetching orders not yet exported to the ERP."""
    as_of: str | datetime | None = None  # cutoff; datetime inputs are normalized to ISO strings

    @field_validator('as_of', mode="before")
    def convert_to_str(cls, v):
        """Serialize datetime inputs to ISO-8601 strings before validation."""
        if isinstance(v, datetime):
            return v.isoformat()
        return v
class SDGetUnexportedOrdersResponse(BaseModel):
    """One unexported order with its current ERP export status."""
    order_id: str
    order_number: str | int  # upstream sometimes sends numeric order numbers
    completed_date: datetime | None = None
    export_status: ERPStatus
    error_message: str | None = None  # populated when export_status is errors
class SDSetOrderExportStatusRequest(BaseModel):
    """Request to set an order's ERP export status (with optional error text)."""
    order_id: str
    status: ERPStatus
    error: str | None = None
class SDGetAllSupplierReconciliationInvoiceRequest(BaseModel):
    """Query filters for listing supplier reconciliation invoices.

    All fields are optional; None means "don't filter on this".
    """
    status: SDSupplierReconciliationInvoiceStatus | None = None
    due_date_start: datetime | None = None
    due_date_end: datetime | None = None
    last_change_date: datetime | None = None
    suppliers: list[str] | None = None
    invoice_numbers: list[str] | None = None
    include_exported: bool | None = None
class SDDeliveryReconciliationMatchOverviewRequest(BaseModel):
    """Query filters for the delivery reconciliation match overview.

    All fields are optional; None means "don't filter on this".
    """
    status: SDDeliveryReconciliationMatchStatus | None = None
    store_id: str | None = None
    from_date: datetime | None = None
    to_date: datetime | None = None
class SDSupplierInvoiceCreateRequest(BaseModel):
    """Payload for creating a supplier invoice in S&D.

    Dates come as UTC/local pairs (use whichever matches how well the tz is
    known, per the Field descriptions). `supplier` and `terminal` are plain
    name strings on input but serialize as {"source_name": ...} objects.
    """
    invoice_number: str
    source_name: str
    supplier: str
    terminal: str | None = None
    due_date_utc: datetime | None = Field(
        default=None,
        description="Invoice due date (UTC). Use due_date_local if the tz is assumed to be terminal-local."
    )
    due_date_local: datetime | None = Field(
        default=None, description="Invoice due date (local). Use due_date_utc if the tz is concretely known.")
    invoice_date: datetime | None = Field(
        default=None,
        description="Date of invoice (UTC). Use invoice_date_local if the tz is assumed to be terminal-local."
    )
    invoice_date_local: datetime | None = Field(
        default=None, description="Date of invoice (local). Use invoice_date if the tz is concretely known.")
    details: list[SDSupplierInvoiceDetail]
    extra_data: Optional[dict] = Field(default_factory=dict)
    ship_to_city: str | None = None
    ship_to_state: str | None = None
    ship_from_city: str | None = None
    ship_from_state: str | None = None

    @field_serializer("supplier")
    def serialize_supplier(self, supplier: str):
        """Serialize supplier as a {"source_name": <name>} object on dump."""
        return {
            "source_name": supplier
        }

    @field_serializer("terminal")
    def serialize_terminal(self, terminal: str | None):
        """Serialize terminal as a {"source_name": <name>} object, or None when unset/empty."""
        if not terminal:
            return None
        return {
            "source_name": terminal
        }
class CurvePointPrice(BaseModel):
    """A single price value on a PE curve point (upstream PascalCase names).

    Union-typed fields reflect that the upstream API is inconsistent about
    numeric vs string encoding.
    """
    CurvePointPriceId: Optional[int] = None
    FormulaMarkerId: Optional[int] = None
    FormulaResult: Optional[Union[float, str]] = None
    FormulaResultId: Optional[int] = None
    PriceTypeMeaning: Optional[str] = None
    SourceId: Optional[Union[int, str]] = None
    Value: Optional[float] = None
class PEPriceData(BaseModel):
    """A price record as returned by the PE API (upstream PascalCase names).

    *_SourceId fields may arrive as ints or strings, and the *DateTime fields
    may arrive as unparsed strings — hence the Union types throughout.
    """
    CostSourceTypeMeaning: Optional[str] = None
    # Counterparty identification
    CounterParty: Optional[str] = None
    CounterPartyId: Optional[int] = None
    CounterPartySourceId: Optional[Union[int, str]] = None
    CounterPartySourceIdString: Optional[str] = None
    CredentialUsername: Optional[str] = None
    # Currency
    Currency: Optional[str] = None
    CurrencyId: Optional[int] = None
    CurrencySourceId: Optional[Union[int, str]] = None
    # Curve point and its prices
    CurvePointId: Optional[int] = None
    CurvePointPrices: Optional[List[CurvePointPrice]] = None
    CurvePointTypeMeaning: Optional[str] = None
    # Effective window
    EffectiveFromDateTime: Optional[Union[datetime, str]] = None
    EffectiveToDateTime: Optional[Union[datetime, str]] = None
    EstimateActual: Optional[str] = None
    ExchangeSymbol: Optional[str] = None
    IsActive: Optional[bool] = None
    # Location identification
    Location: Optional[str] = None
    LocationId: Optional[int] = None
    LocationSourceId: Optional[Union[int, str]] = None
    LocationSourceIdString: Optional[str] = None
    NetOrGrossMeaning: Optional[str] = None
    # Price instrument / publisher
    PriceInstrument: Optional[str] = None
    PriceInstrumentId: Optional[int] = None
    PriceInstrumentSourceId: Optional[Union[int, str]] = None
    PricePublisher: Optional[str] = None
    PricePublisherId: Optional[int] = None
    # Product identification
    Product: Optional[str] = None
    ProductId: Optional[int] = None
    ProductSourceId: Optional[Union[int, str]] = None
    ProductSourceIdString: Optional[str] = None
    QuoteConfigurationId: Optional[int] = None
    # Source contract references
    SourceContractDetailId: Optional[int] = None
    SourceContractId: Optional[int] = None
    SourceContractValuationPriceInstrumentId: Optional[int] = None
    SourceId: Optional[Union[int, str]] = None
    SourceInternalContractNumber: Optional[str] = None
    # Trade period window
    TradePeriodFromDateTime: Optional[Union[datetime, str]] = None
    TradePeriodToDateTime: Optional[Union[datetime, str]] = None
    UnitOfMeasure: Optional[str] = None
    UnitOfMeasureId: Optional[int] = None
    UpdatedDateTime: Optional[Union[datetime, str]] = None
    ExtendByDays: Optional[int] = None
    Rank: Optional[int] = None
    PriceType: Optional[str] = None
    IsLatest: Optional[bool] = False
class SQLClientParams(BaseModel):
    """Connection parameters for a SQL client.

    NOTE(review): `password` is held in plain text on the model — ensure this
    is never logged or serialized into stored payloads.
    """
    server: str
    database: str
    username: str
    password: str
    echo: Optional[bool] = False  # presumably SQLAlchemy engine echo — confirm at the call site
class RITAClientParams(BaseModel):
    """Connection/auth parameters for the RITA API client."""
    base_url: str
    client_id: str
    client_secret: str  # NOTE(review): plain-text secret — keep out of logs
    rita_tenant: str
    system_name: Optional[str] = "RITA"
class ReadingQuery(BaseModel):
    """Filter criteria for selecting tank readings from a readings DataFrame.

    Each `by_*` field is optional; None/empty means "don't filter on this".
    NOTE(review): `by_wildcard` is declared but never used by `as_mask` —
    either dead or consumed elsewhere; confirm.
    """
    by_store_numbers: Optional[list[str]] = None  # plain numbers, or "a:b" composite keys
    by_tank_ids: Optional[list[int]] = None
    by_market: Optional[list[str]] = None
    by_counterparty: Optional[list[str]] = None
    by_wildcard: Optional[str] = None

    def as_mask(self, original: pd.DataFrame) -> pd.Series:
        """Return a boolean mask over `original` combining all active filters (AND).

        Expects columns: composite_key, store_number, tank_id, market,
        counterparty_name (only those needed by the active filters are read).
        """
        final_filt = pd.Series(data=True, index=original.index)
        if self.by_store_numbers:
            # Entries containing ":" are composite keys matched as substrings of
            # the composite_key column; the rest are exact store_number matches.
            composite_stores = [s for s in self.by_store_numbers if ":" in s]
            simple_stores = [s for s in self.by_store_numbers if ":" not in s]
            store_mask = pd.Series(data=False, index=original.index)
            if composite_stores:
                # re.escape so literal store keys can't be misread as regex syntax
                store_mask |= original["composite_key"].str.contains(
                    "|".join(re.escape(s) for s in composite_stores), regex=True)
            if simple_stores:
                store_mask |= original["store_number"].isin(simple_stores)
            final_filt &= store_mask
        if self.by_tank_ids:
            final_filt &= original["tank_id"].isin(self.by_tank_ids)
        if self.by_market:
            final_filt &= original["market"].isin(self.by_market)
        if self.by_counterparty:
            final_filt &= original["counterparty_name"].isin(self.by_counterparty)
        return final_filt
class FileFormat(str, Enum):
    """
    File format options for tank reading output files.

    This enum defines the available output formats for parsed tank readings data.
    Each format produces a different structure and field set tailored to specific
    client requirements or integration systems.

    Attributes:
        standard (str): PDI-compatible format with the following field structure:
            - Store Number: Store identifier from the source system
            - Name: Store name from lookup data
            - Tank Id: Tank identifier within the store
            - Tank Product: Product type stored in the tank
            - Carrier: Carrier information from tank lookup
            - Volume: Current volume measurement in the tank
            - Ullage: Unfilled space (storage_max - volume)
            - Read Time: Timestamp in "YYYY-MM-DD HH:MM:SS TZ±HHMM" format
            - Disconnected (optional): Boolean indicating if tank hasn't reported
              within the configured threshold
            Supports filtering to disconnected tanks only via configuration.

        circlek (str): Circle K specific format with TelaPoint integration structure:
            - ClientName: Client identifier (set to None)
            - FacilityName: Facility name (set to None)
            - FacilityInternalID: Internal facility ID (set to None)
            - FacilityState: State location (set to None)
            - VolumePercentage: Volume as percentage (set to None)
            - TankStatus: Current tank status (set to None)
            - TankNbr: Tank number (set to None)
            - TankInternalID: Internal tank ID (set to None)
            - AtgTankNumber: ATG tank number (mapped from Tank Id)
            - ATGTankLabel: ATG tank label (set to None)
            - Product: Product information (set to None)
            - TankCapacity: Maximum tank capacity (set to None)
            - Ullage: Unfilled space (set to None)
            - SafeUllage: Safe ullage level (set to None)
            - Volume: Current volume measurement
            - Height: Tank height measurement (set to None)
            - Water: Water level measurement (set to None)
            - Temperature: Temperature measurement (set to None)
            - InventoryDate: Formatted timestamp as "MM/DD/YYYY HH:MM"
            - SystemUnits: Unit system (set to None)
            - CollectionDateTimeUtc: UTC collection time (set to None)
            - TelaPointAccountNumber: Fixed value of 100814
            - TelaPointSiteNumber: Store number from source data

        circlek2 (str): Simplified Circle K format for Gravitate system integration:
            - storeNumber: Store number as it appears in Gravitate
            - timestamp: Timestamp when the volume was read
            - tankLabel: Product name assigned to the tank
            - volume: Volume of tank at the time of reading
            - tankNumber: Tank ID as it appears in Gravitate
            - ullage: Unfilled space within the tank
            - productLevel: Product level measurement (can be set to 0)
            - waterLevel: Water level measurement (can be set to 0)
            - temperature: Temperature measurement (can be set to 0)

        reduced (str): Reduced field set. NOTE(review): not documented in the
            original docstring — see the export implementation for its exact
            columns before relying on it.

    Example:
        >>> format_type = FileFormat.standard
        >>> step_config = {"format": FileFormat.circlek2, ...}
    """

    standard = "standard"
    circlek = "circlek"
    circlek2 = "circlek2"
    reduced = "reduced"
class ExportReadingsWindowMode(StrEnum):
    """Date-window selection mode for exporting tank readings.

    See ExportReadingsConfig.window_mode for the semantics of each mode.
    """
    HOURS_BACK = "hours_back"
    LATEST_ONLY = "latest_only"
    PREVIOUS_DAY = "previous_day"
class ExportReadingsConfig(BaseModel):
    """Configuration for exporting tank readings to external systems.

    This model defines how tank readings should be queried, formatted, and delivered
    to external recipients via FTP or email. It supports various file formats and
    filtering options for different client requirements.

    Attributes:
        reading_query (ReadingQuery): Query filters for selecting which readings to export
        window_mode (ExportReadingsWindowMode): One of 3 modes that chooses what sort of date filtering to perform on
            tank readings: ``hours_back`` will filter to readings within the last X hours as of job run time.
            ``latest_only`` will get readings within ``hours_back`` and keep only the latest one for each tank (tanks with
            only older readings will not appear in the result set). ``previous_day`` will filter to readings within the
            previous day as of job run time, from midnight to midnight.
        reading_reported_timezone (str): Timezone for reading timestamps
        hours_back (int): How many hours back to look for readings (default 1)
        file_base_name (str): Base filename for the exported file
        file_name_date_format (str): Date format string for filename timestamps
        ftp_directory (str): Target directory on FTP server (if using FTP delivery)
        file_format (FileFormat): Output format (standard, circlek, circlek2, reduced)
        email_addresses (list[str]): Email recipients (if using email delivery)
        include_water_level (bool): Whether to include water level measurements
        disconnected_column (bool): Whether to include disconnected tank status
        disconnected_only (bool): Whether to export only disconnected tanks
        disconnected_hours_threshold (float): Hours threshold for considering tanks disconnected
        ftp_credentials (str): FTP credentials identifier for delivery
    """
    reading_query: ReadingQuery
    window_mode: ExportReadingsWindowMode
    reading_reported_timezone: str
    hours_back: int = 1
    file_base_name: str
    file_name_date_format: Optional[str] = "%Y%m%d%H%M%S"
    ftp_directory: Optional[str] = None
    file_format: FileFormat = FileFormat.standard
    email_addresses: Optional[list[str]] = None
    include_water_level: bool = False
    disconnected_column: bool = False
    disconnected_only: bool = False
    disconnected_hours_threshold: Optional[float] = None
    ftp_credentials: Optional[str] = None
class DistributionReportConfig(BaseModel):
    """Configuration for generating and distributing contract rack utilization reports.

    This model defines how distribution reports should be generated from Gravitate data,
    formatted, and delivered to external recipients via FTP or email. It handles the
    processing of contract rack utilization data with both detailed and summary views.

    Attributes:
        file_base_name (str): Base filename for the generated report files
        google_project_id (str): Google Cloud Project ID containing the BigQuery datasets
        n_hours_back (int): Optional lookback window in hours
        include_model_mode (str): Model-inclusion mode (default "latest_only")
        order_state (str): Order state filter (default "accepted")
        days_back (int): Days before now for the report window start (start_date)
        days_forward (int): Days after now for the report window end (end_date)
        gbq_table_details (str): BigQuery table path for detailed contract rack utilization data.
            Contains product-level details for each contract and rack combination
        gbq_table_summary (str): BigQuery table path for summarized contract rack utilization data.
            Contains aggregated metrics without product-level breakdown
        file_name_date_format (str): Date format string for timestamp suffixes in filenames
        ftp_directory (str): Target directory on FTP server (if using FTP delivery)
        email_addresses (list[str]): Email recipients (if using email delivery)
    """
    file_base_name: str
    google_project_id: str
    n_hours_back: int | None = None
    include_model_mode: str | None = "latest_only"
    order_state: str | None = "accepted"
    days_back: int | None = None
    days_forward: int | None = None
    gbq_table_details: Optional[str] = "bb_reporting.contract_rack_util_product_detail"
    gbq_table_summary: Optional[str] = "bb_reporting.contract_rack_util"
    file_name_date_format: Optional[str] = "%Y%m%d%H%M%S"
    ftp_directory: Optional[str] = None
    email_addresses: Optional[list[str]] = None

    @property
    def start_date(self) -> datetime:
        """Report window start: now minus days_back, or now when days_back is unset."""
        # BUG FIX: was `datetime.now() - datetime.timedelta(...)`. `datetime`
        # here is the class (the file annotates fields with `datetime | None`),
        # so `datetime.timedelta` raised AttributeError whenever days_back was
        # set. Local import avoids touching the (unseen) module import block.
        from datetime import timedelta
        if self.days_back is not None:
            return datetime.now() - timedelta(days=self.days_back)
        return datetime.now()

    @property
    def end_date(self) -> datetime:
        """Report window end: now plus days_forward, or now when days_forward is unset."""
        # BUG FIX: same `datetime.timedelta` AttributeError as start_date.
        from datetime import timedelta
        if self.days_forward is not None:
            return datetime.now() + timedelta(days=self.days_forward)
        return datetime.now()
class ATGConfig(BaseModel):
    """Configuration for a specific vendor's tank reading import.

    This model defines how to import tank readings from a specific vendor through
    SFTP or email delivery. It includes file matching settings, processing options,
    and post-processing actions like archiving or deletion.

    Attributes:
        config_names (List[str]): List of file configuration names to process
        archive_gcs_bucket_path (str): GCS bucket path for archiving processed files
        sd_credentials (str): S&D credentials identifier
        gcs_credentials (str): GCS credentials identifier for bucket access
        ftp_credentials (str): FTP credentials identifier for file retrieval
        email_credentials (str): Credentials identifier for the mailbox
        to_email_address (str): Email address of inbox for email attachment-based delivery.
        from_email_address (str): Email address of sender, if desired, for email attachment-based delivery.
        delivered_to_email_address (str): Used for certain forwarding setups where the from/to might not be our own
            mailbox.
        attachment_extension (str): Attachment extension filter for email delivery
        email_subject (str): Subject filter for email delivery
        config_mode (ConfigMode): How to load file configurations (AllFiltered, FromBucket, ByName)
        file_match_type (ConfigMatchMode): How to match files (Exact, Partial, ByExtension)
        mapping_type (MappingMode): Mapping mode to use during parsing
        archive_files (bool): Whether to archive files after processing
        delete_files (bool): Whether to delete files after processing (mutually exclusive with archive_files)
        minutes_back (int): How many minutes back to look for files
        timezone (str): Timezone for date filtering operations

    Note: archive_files and delete_files are mutually exclusive.
    """
    config_names: List[str]
    archive_gcs_bucket_path: Optional[str] = None
    # FIX: was `sd_credentials: str = None` — a None default on a non-optional
    # annotation. Optional[str] matches the sibling credential fields and makes
    # an explicit None valid; existing callers are unaffected.
    sd_credentials: Optional[str] = None
    gcs_credentials: Optional[str] = None
    ftp_credentials: Optional[str] = None
    email_credentials: Optional[str] = None
    to_email_address: Optional[str] = None
    from_email_address: Optional[str] = None
    delivered_to_email_address: Optional[str] = None
    attachment_extension: Optional[str] = None
    email_subject: Optional[str] = None
    config_mode: ConfigMode = ConfigMode.AllFiltered
    file_match_type: ConfigMatchMode = ConfigMatchMode.Partial
    mapping_type: MappingMode = MappingMode.full
    archive_files: bool = False
    delete_files: bool = False
    minutes_back: Optional[int] = None
    timezone: Optional[str] = None

    @field_validator('delete_files')
    @classmethod
    def validate_mutually_exclusive(cls, v, info):
        """Reject configs that set both archive_files and delete_files."""
        if v and info.data.get('archive_files', False):
            raise ValueError("Cannot have both archive_files and delete_files set to True")
        return v

    @property
    def mode(self):
        """Delivery mode: "ftp" when FTP credentials are configured, else "email"."""
        return "ftp" if self.ftp_credentials else "email"
class ImportTankReadings(BaseModel):
    """Container for multiple vendor configurations for tank reading imports.

    This model serves as a wrapper for multiple ATGConfig objects, enabling
    batch processing of tank readings from different vendors in a single pipeline
    execution. Each vendor can have different file sources, processing rules,
    and post-processing actions.

    Attributes:
        configs (List[ATGConfig]): List of vendor-specific import configurations
            Each config defines how to import and process tank readings from that vendor

    NOTE(review): the original docstring also documented an ``sd_env_mode (str)``
    attribute ("production" vs "test" S&D environment), but no such field is
    declared on this model — either the field or the doc is stale; confirm.
    """
    configs: List[ATGConfig]
class AgGridBaseModel(BaseModel):
    """Base class for AgGrid models.

    Derives a default AgGrid column-definition list from the model's fields:
    nested AgGrid models become 'object' columns with child definitions, and
    scalar fields get a cell type inferred from their annotation text.
    """

    @classmethod
    def default_column_defs(cls, hidden_columns: list[str] = None, only_include_columns: list[str] = None):
        """Build an AgGrid column definition for every field of this model.

        Args:
            hidden_columns: field names to mark hidden (ignored when
                only_include_columns is provided).
            only_include_columns: when provided, every field NOT listed here
                is marked hidden.

        Returns:
            list[dict]: one column definition per model field.
        """
        hidden = [] if hidden_columns is None else hidden_columns
        defs = []
        for name, info in cls.model_fields.items():
            if only_include_columns is not None:
                hide = name not in only_include_columns
            else:
                hide = name in hidden
            entry = {
                'field': name,
                'headerName': name.replace('_', ' ').title(),
                'isHidden': hide,
            }
            annotation = info.annotation
            # Nested AgGrid model → recurse into its own column defs.
            is_nested = (hasattr(annotation, '__mro__')
                         and BaseModel in annotation.__mro__
                         and hasattr(annotation, 'default_column_defs'))
            if is_nested:
                entry['type'] = 'object'
                entry['children'] = annotation.default_column_defs()
            else:
                # Infer the AgGrid cell type from the annotation's string form.
                type_text = str(annotation).lower()
                if 'int' in type_text or 'float' in type_text:
                    entry['type'] = 'number'
                elif 'bool' in type_text:
                    entry['type'] = 'boolean'
                elif 'date' in type_text:
                    entry['type'] = 'date'
                elif 'list' in type_text or 'dict' in type_text:
                    entry['type'] = 'object'
                else:
                    entry['type'] = 'text'
            defs.append(entry)
        return defs
|