brynq-sdk-meta4 1.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- brynq_sdk_meta4/__init__.py +1 -0
- brynq_sdk_meta4/cost_centers.py +74 -0
- brynq_sdk_meta4/employees.py +121 -0
- brynq_sdk_meta4/jobs.py +120 -0
- brynq_sdk_meta4/meta4.py +165 -0
- brynq_sdk_meta4/schemas/__init__.py +9 -0
- brynq_sdk_meta4/schemas/cost_center.py +39 -0
- brynq_sdk_meta4/schemas/employee.py +225 -0
- brynq_sdk_meta4/schemas/job.py +59 -0
- brynq_sdk_meta4/schemas/reference_enums.py +624 -0
- brynq_sdk_meta4-1.0.1.dist-info/METADATA +17 -0
- brynq_sdk_meta4-1.0.1.dist-info/RECORD +14 -0
- brynq_sdk_meta4-1.0.1.dist-info/WHEEL +5 -0
- brynq_sdk_meta4-1.0.1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
from .meta4 import Meta4
|
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
import pandas as pd
|
|
2
|
+
from .schemas.cost_center import CostCenterSchema
|
|
3
|
+
|
|
4
|
+
class CostCenters:
    """
    Handles all cost center related operations in Meta4.

    Rows passed to create/update/delete are validated against
    ``CostCenterSchema`` and accumulated in ``batch_df``; ``export`` writes
    the whole batch to a single CSV file via the parent Meta4 client.
    """

    def __init__(self, meta4):
        """
        Args:
            meta4: Parent Meta4 client, used for validation and CSV export.
        """
        self.meta4 = meta4
        # Initialize batch_df with the schema's column layout (aliases are
        # the Spanish CSV headers expected by Meta4).
        schema_fields = CostCenterSchema.model_fields
        column_names = [field.alias or name for name, field in schema_fields.items()]
        self.batch_df = pd.DataFrame(columns=column_names)

    def _add_to_batch(self, df: pd.DataFrame, movement_type: str) -> None:
        """Validate *df* with the given movement code and append it to the batch."""
        # Work on a copy so the caller's DataFrame is not mutated.
        df = df.copy()
        df['movement_type'] = movement_type
        valid_df = self.meta4.validate(df=df, schema=CostCenterSchema)
        self.batch_df = pd.concat([self.batch_df, valid_df], ignore_index=True)

    def create(self, df: pd.DataFrame) -> dict:
        """
        Add cost center records with the CREATE (-36) movement type.

        Args:
            df (pd.DataFrame): DataFrame containing cost center data to validate.

        Returns:
            dict: {"success": True, "message": ...} on success.

        Raises:
            Exception: If validation fails.
        """
        try:
            # Movement code -36 is labelled CREATE by CostCenterSchema's validator.
            self._add_to_batch(df, '-36')
            return {"success": True, "message": "Cost centers created successfully"}
        except Exception as e:
            raise Exception(f"Failed to export cost centers: {e}")

    def update(self, df: pd.DataFrame) -> dict:
        """
        Add cost center records with an update movement type.

        Args:
            df (pd.DataFrame): DataFrame containing cost center data to validate.

        Returns:
            dict: {"success": True, "message": ...} on success.

        Raises:
            Exception: If validation fails.
        """
        try:
            # NOTE(review): original comment said MODIFICACION but the code sets
            # '-36', which CostCenterSchema labels CREATE ('-37' is labelled
            # UPDATE). Behaviour kept as-is — confirm the intended movement
            # code against the Meta4 interface specification.
            self._add_to_batch(df, '-36')
            return {"success": True, "message": "Cost centers updated successfully"}
        except Exception as e:
            raise Exception(f"Failed to update cost centers: {e}")

    def delete(self, df: pd.DataFrame) -> dict:
        """
        Add cost center records with a delete movement type.

        Args:
            df (pd.DataFrame): DataFrame containing cost center data to validate.

        Returns:
            dict: {"success": True, "message": ...} on success.

        Raises:
            Exception: If validation fails.
        """
        try:
            # NOTE(review): '-37' was commented as BAJA here, but the schema's
            # validator labels '-37' UPDATE — confirm against the Meta4 spec.
            self._add_to_batch(df, '-37')
            return {"success": True, "message": "Cost centers deleted successfully"}
        except Exception as e:
            raise Exception(f"Failed to delete cost centers: {e}")

    def export(self) -> dict:
        """
        Export the accumulated cost center batch to cost_center_import.csv.

        Returns:
            dict: Export result from Meta4.export (includes the file path).
        """
        return self.meta4.export(df=self.batch_df, filename="cost_center_import.csv")

    def get_batch_df(self) -> pd.DataFrame:
        """
        Get the current batch DataFrame containing all validated cost center records.
        """
        return self.batch_df

    def clear_batch_df(self):
        """
        Reset the batch DataFrame to an empty frame with the schema columns.
        """
        # Rebuild with the schema column layout — matches Employees/Jobs
        # behaviour; previously this reset to a column-less DataFrame.
        schema_fields = CostCenterSchema.model_fields
        column_names = [field.alias or name for name, field in schema_fields.items()]
        self.batch_df = pd.DataFrame(columns=column_names)
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
import pandas as pd
|
|
2
|
+
from .schemas.employee import EmployeeSchema
|
|
3
|
+
import os
|
|
4
|
+
|
|
5
|
+
class Employees:
    """
    Handles all employee related operations in Meta4.

    Rows passed to create/update/delete are validated against
    ``EmployeeSchema`` and accumulated in ``batch_df``; ``export`` writes
    the whole batch to a single CSV file via the parent Meta4 client.
    """

    def __init__(self, meta4):
        """
        Args:
            meta4: Parent Meta4 client, used for validation and CSV export.
        """
        self.meta4 = meta4
        # Initialize batch_df with EmployeeSchema columns (aliases are the
        # CSV headers expected by Meta4).
        schema_fields = EmployeeSchema.model_fields
        column_names = [field.alias or name for name, field in schema_fields.items()]
        self.batch_df = pd.DataFrame(columns=column_names)

    def _add_to_batch(self, df: pd.DataFrame, movement_type: str) -> None:
        """Validate *df* with the given movement code and append it to the batch."""
        # Work on a copy so the caller's DataFrame is not mutated.
        df = df.copy()
        df['movement_type'] = movement_type
        validated_df = self.meta4.validate(df=df, schema=EmployeeSchema)
        self.batch_df = pd.concat([self.batch_df, validated_df], ignore_index=True)

    def create(self, df: pd.DataFrame) -> dict:
        """
        Create new employees (ALTA movement type '1').

        Args:
            df (pd.DataFrame): DataFrame containing employee data for creation.

        Returns:
            dict: {"success": True, "message": ...} on success.

        Raises:
            Exception: If validation fails.
        """
        try:
            self._add_to_batch(df, '1')
            return {"success": True, "message": "Employees created successfully"}
        except Exception as e:
            raise Exception(f"Failed to create employees: {e}")

    def update(self, df: pd.DataFrame) -> dict:
        """
        Update existing employees (MODIFICACION movement type '3').

        Args:
            df (pd.DataFrame): DataFrame containing employee data for update.

        Returns:
            dict: {"success": True, "message": ...} on success.

        Raises:
            Exception: If validation fails.
        """
        try:
            self._add_to_batch(df, '3')
            return {"success": True, "message": "Employees updated successfully"}
        except Exception as e:
            raise Exception(f"Failed to update employees: {e}")

    def delete(self, df: pd.DataFrame) -> dict:
        """
        Delete/terminate employees (BAJA movement type '2').

        Args:
            df (pd.DataFrame): DataFrame containing employee data for termination.

        Returns:
            dict: {"success": True, "message": ...} on success.

        Raises:
            Exception: If validation fails.
        """
        try:
            self._add_to_batch(df, '2')
            return {"success": True, "message": "Employees deleted successfully"}
        except Exception as e:
            raise Exception(f"Failed to delete employees: {e}")

    def export(self) -> dict:
        """
        Export the accumulated employee batch to employee_import.csv.

        Returns:
            dict: Export result from Meta4.export (includes the file path).

        Raises:
            Exception: If the export fails.
        """
        return self.meta4.export(df=self.batch_df, filename="employee_import.csv")

    def get_batch_df(self) -> pd.DataFrame:
        """
        Get the current batch DataFrame containing all validated employee records.

        Returns:
            pd.DataFrame: The batch DataFrame with all validated records.
        """
        return self.batch_df

    def clear_batch_df(self):
        """
        Reset the batch DataFrame to an empty frame with the schema columns.
        """
        schema_fields = EmployeeSchema.model_fields
        column_names = [field.alias or name for name, field in schema_fields.items()]
        self.batch_df = pd.DataFrame(columns=column_names)
|
brynq_sdk_meta4/jobs.py
ADDED
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
import pandas as pd
|
|
2
|
+
from .schemas.job import JobSchema
|
|
3
|
+
|
|
4
|
+
class Jobs:
    """
    Handles all job related operations in Meta4.

    Rows passed to create/update/delete are validated against ``JobSchema``
    and accumulated in ``batch_df``; ``export`` writes the whole batch to a
    single CSV file via the parent Meta4 client.
    """

    def __init__(self, meta4):
        """
        Args:
            meta4: Parent Meta4 client, used for validation and CSV export.
        """
        self.meta4 = meta4
        # Initialize batch_df with JobSchema columns (aliases are the CSV
        # headers expected by Meta4).
        schema_fields = JobSchema.model_fields
        column_names = [field.alias or name for name, field in schema_fields.items()]
        self.batch_df = pd.DataFrame(columns=column_names)

    def _add_to_batch(self, df: pd.DataFrame, movement_type: str) -> None:
        """Validate *df* with the given movement code and append it to the batch."""
        # Work on a copy so the caller's DataFrame is not mutated.
        df = df.copy()
        df['movement_type'] = movement_type
        validated_df = self.meta4.validate(df=df, schema=JobSchema)
        self.batch_df = pd.concat([self.batch_df, validated_df], ignore_index=True)

    def create(self, df: pd.DataFrame) -> dict:
        """
        Create new jobs (CREATE movement type -28).

        Args:
            df (pd.DataFrame): DataFrame containing job data for creation.

        Returns:
            dict: {"success": True, "message": ...} on success.

        Raises:
            Exception: If validation fails.
        """
        try:
            self._add_to_batch(df, '-28')
            return {"success": True, "message": "Jobs created successfully"}
        except Exception as e:
            raise Exception(f"Failed to create jobs: {e}")

    def update(self, df: pd.DataFrame) -> dict:
        """
        Update existing jobs (UPDATE movement type -29).

        Args:
            df (pd.DataFrame): DataFrame containing job data for update.

        Returns:
            dict: {"success": True, "message": ...} on success.

        Raises:
            Exception: If validation fails.
        """
        try:
            self._add_to_batch(df, '-29')
            return {"success": True, "message": "Jobs updated successfully"}
        except Exception as e:
            raise Exception(f"Failed to update jobs: {e}")

    def delete(self, df: pd.DataFrame) -> dict:
        """
        Delete jobs (movement type -29).

        Args:
            df (pd.DataFrame): DataFrame containing job data for deletion.

        Returns:
            dict: {"success": True, "message": ...} on success.

        Raises:
            Exception: If validation fails.
        """
        try:
            # NOTE(review): delete uses '-29', the same code as update()
            # (the original docstring also said -29). Looks like a possible
            # copy-paste slip — confirm the DELETE code with the Meta4 spec.
            self._add_to_batch(df, '-29')
            return {"success": True, "message": "Jobs deleted successfully"}
        except Exception as e:
            raise Exception(f"Failed to delete jobs: {e}")

    def export(self) -> dict:
        """
        Export the accumulated job batch to job_import.csv.

        Returns:
            dict: Export result from Meta4.export (includes the file path).

        Raises:
            Exception: If the export fails.
        """
        return self.meta4.export(df=self.batch_df, filename="job_import.csv")

    def get_batch_df(self) -> pd.DataFrame:
        """
        Get the current batch DataFrame containing all validated job records.

        Returns:
            pd.DataFrame: The batch DataFrame with all validated records.
        """
        return self.batch_df

    def clear_batch_df(self):
        """
        Reset the batch DataFrame to an empty frame with the schema columns.
        """
        schema_fields = JobSchema.model_fields
        column_names = [field.alias or name for name, field in schema_fields.items()]
        self.batch_df = pd.DataFrame(columns=column_names)
|
brynq_sdk_meta4/meta4.py
ADDED
|
@@ -0,0 +1,165 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import csv
|
|
3
|
+
from typing import List, Optional, Literal
|
|
4
|
+
import pandas as pd
|
|
5
|
+
from brynq_sdk_ftp import SFTP
|
|
6
|
+
from brynq_sdk_brynq import BrynQ
|
|
7
|
+
from brynq_sdk_functions import Functions
|
|
8
|
+
import pydantic
|
|
9
|
+
from .employees import Employees
|
|
10
|
+
from .cost_centers import CostCenters
|
|
11
|
+
from .jobs import Jobs
|
|
12
|
+
|
|
13
|
+
class Meta4(BrynQ):
    """
    Meta4 HR system client for BrynQ integrations.

    Focuses on schema validation and CSV export functionality. The entity
    facades (``employees``, ``cost_centers``, ``jobs``) accumulate validated
    rows, which are exported to CSV here and uploaded over SFTP.
    """

    def __init__(self, system_type: Optional[Literal['source', 'target']] = None, output_path: str = "outputs", debug: bool = False):
        """
        Initialize Meta4 client.

        Args:
            system_type: Credential type passed to the BrynQ credential lookup.
            output_path: Local directory where generated CSV files are written.
            debug: Debug flag (stored on the instance; not used in this class).
        """
        super().__init__()
        self.debug = debug
        self.output_path = output_path
        # SFTP client as a composition attribute
        self.sftp = SFTP()
        credentials = self.interfaces.credentials.get(system="meta-4", system_type=system_type)
        # Some credential payloads nest the values under a 'data' key;
        # fall back to the payload itself otherwise.
        credentials = credentials.get('data', credentials)
        self.sftp._set_credentials(credentials)

        # Initialize entity classes; each one shares this client for
        # validation and export.
        self.employees = Employees(self)
        self.cost_centers = CostCenters(self)
        self.jobs = Jobs(self)

    def validate(self, df: pd.DataFrame, schema: "type[pydantic.BaseModel]") -> pd.DataFrame:
        """
        Validate each row of *df* against *schema* and return the valid rows.

        Invalid rows are dropped; only a count of failures is printed.

        Args:
            df: DataFrame whose rows are validated one by one.
            schema: Pydantic model class used for row validation.

        Returns:
            pd.DataFrame: Valid rows, serialized with field aliases.

        Raises:
            Exception: If the validation process itself fails unexpectedly.
        """
        try:
            data_list = df.to_dict('records')

            valid_data = []
            invalid_data = []
            for data_item in data_list:
                try:
                    validated_item = schema(**data_item)
                    # by_alias=True emits the CSV header names expected by Meta4.
                    valid_data.append(validated_item.model_dump(by_alias=True, mode="json"))
                except Exception as validation_error:
                    invalid_data.append({
                        'data': data_item,
                        'error': str(validation_error)
                    })

            # Report (but do not raise on) per-row validation failures.
            if invalid_data:
                print(f" {len(invalid_data)} lines of {schema.__name__} data validation failed:")

            return pd.DataFrame(valid_data)
        except Exception as e:
            raise Exception(f"Failed to validate data: {e}")

    def export(
            self,
            df: pd.DataFrame,
            filename: str
    ) -> dict:
        """
        Write *df* to a CSV file inside the configured output directory.

        Args:
            df: DataFrame containing the data to export.
            filename: Name of the CSV file to create (e.g. "employee_import.csv").

        Returns:
            dict: {'filepath': <path of the written CSV file>}

        Raises:
            Exception: If file writing fails.
        """
        try:
            os.makedirs(self.output_path, exist_ok=True)

            # Bug fix: the filename parameter was previously ignored and a
            # placeholder path was written instead.
            filepath = f"{self.output_path}/{filename}"
            # Semicolon-separated, BOM-prefixed UTF-8 as expected by Meta4.
            df.to_csv(
                filepath,
                index=False,
                encoding="utf-8-sig",
                sep=";",
                quotechar='"',
                quoting=csv.QUOTE_MINIMAL
            )
            return {
                'filepath': filepath,
            }

        except Exception as e:
            raise Exception(f"Failed to export data for {filename}: {e}")

    def upload(self, upload_path: str = "/") -> str:
        """
        Upload all CSV files from the output directory to the remote server.

        This method scans the output directory for CSV files and uploads each
        one via SFTP. Files are routed to different remote directories based
        on their filename:
        - employee_import.csv -> ENTRADA/EMPLEADOS folder
        - all other files     -> SALIDA folder

        Args:
            upload_path (str): Remote base directory path for the uploads.

        Returns:
            str: Summary message listing the uploaded remote file paths.

        Raises:
            Exception: If no file could be uploaded, or on unexpected errors.
        """
        try:
            # Get list of CSV files in output directory
            csv_files = [f for f in os.listdir(self.output_path) if f.endswith('.csv')]
            uploaded_files = []

            # Upload each CSV file; best-effort, one failure does not stop the rest.
            for csv_file in csv_files:
                try:
                    local_filepath = os.path.join(self.output_path, csv_file)

                    # Determine upload directory based on filename
                    if csv_file == "employee_import.csv":
                        upload_dir = "ENTRADA/EMPLEADOS"
                    else:
                        upload_dir = "SALIDA"

                    # Remote paths always use forward slashes, even on Windows.
                    remote_filepath = os.path.join(upload_path, upload_dir, csv_file).replace('\\', '/')

                    # Upload file using SFTP attribute
                    self.sftp.upload_file(
                        local_filepath=local_filepath,
                        remote_filepath=remote_filepath
                    )

                    uploaded_files.append(remote_filepath)

                except Exception as file_error:
                    print(f"Failed to upload {csv_file}: {file_error}")
                    # Continue with other files even if one fails
                    continue

            if not uploaded_files:
                raise Exception("No files were successfully uploaded")

            return f"The files successfully uploaded: {', '.join(uploaded_files)}"

        except Exception as e:
            raise Exception(f"Upload failed with unexpected error: {e}")
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
from pydantic import BaseModel, Field, model_validator
|
|
2
|
+
from typing import Literal, Optional
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class CostCenterSchema(BaseModel):
    """
    Pydantic schema for Meta4 Cost Center data.

    Based on EQU_Interfaz_Centros_Coste specification. Field aliases are the
    Spanish CSV column headers expected by Meta4.
    """

    # Pydantic v2 configuration: allow population by field name as well as
    # by alias. Replaces the previous v1-style ``class Config`` that still
    # carried the removed key ``allow_population_by_field_name`` (renamed
    # to ``populate_by_name`` in v2).
    model_config = {"populate_by_name": True}

    # Movement Information: -36 = CREATE, -37 = UPDATE (see validator below)
    movement_type: Literal["-36", "-37"] = Field(
        description="Tipo de movimiento",
        alias="Tipo de movimiento",
        max_length=3,
        json_schema_extra={"example": "-36"},
    )

    # Cost Center Information
    cost_center_id: Optional[str] = Field(
        None,
        description="ID Centro de Coste",
        alias="ID Centro de Coste",
        max_length=50,
        json_schema_extra={"example": "C200"},
    )
    cost_center_name: Optional[str] = Field(
        None,
        description="Nombre Centro de Coste",
        alias="Nombre Centro de Coste",
        max_length=62,
        json_schema_extra={"example": "Centro de Coste Principal"},
    )

    @model_validator(mode='after')
    def validate_movement_requirements(self):
        """Validate mandatory fields based on movement_type."""
        # Which fields are mandatory depends on the movement code; the two
        # branches of the original validator differed only in these values.
        if self.movement_type == "-36":
            # All fields are mandatory for CREATE (-36)
            mandatory, label = ('cost_center_id', 'cost_center_name'), "CREATE (-36)"
        else:
            # Only cost_center_id is mandatory for UPDATE (-37); the Literal
            # annotation guarantees no other value reaches this branch.
            mandatory, label = ('cost_center_id',), "UPDATE (-37)"
        for field_name in mandatory:
            field_value = getattr(self, field_name, None)
            if field_value is None or field_value == "":
                raise ValueError(f"{field_name} is mandatory for {label} movement")
        return self
|