brynq-sdk-allsolutions 2.0.1 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1186 @@
1
+ import hashlib
2
+ import json
3
+ import os
4
+ from typing import Callable, List, Literal, Optional
5
+ import requests
6
+ import pandas as pd
7
+
8
+ from brynq_sdk_brynq import BrynQ
9
+
10
+
11
+ class AllSolutions(BrynQ):
12
+ def __init__(self, system_type: Optional[Literal['source', 'target']] = None, debug: bool = False):
13
+ super().__init__()
14
+ self.timeout = 3600
15
+ self.token = None
16
+ self.refresh_token = None
17
+ self.debug = debug
18
+ self.data_interface_id = os.getenv("DATA_INTERFACE_ID")
19
+ credentials = self.interfaces.credentials.get(system='all-solutions', system_type=system_type)
20
+ self.url = credentials['data']['url']
21
+ self.client_id = credentials['data']['client_id']
22
+ self.secret_id = credentials['data']['secret_id']
23
+ self.username = credentials['data']['username']
24
+ self.password = credentials['data']['password']
25
+ self.content_type_header = {'Content-Type': 'application/json'}
26
+ self.filter_freeform_string = "$filter-freeform"
27
+
28
+ # authentication functions
29
+ def _get_refreshtoken(self):
30
+ signature = hashlib.sha1(f"{self.username}{self.client_id}{self.secret_id}".encode()).hexdigest()
31
+ response = requests.post(url=f"{self.url}login",
32
+ headers=self.content_type_header,
33
+ data=json.dumps({
34
+ "Username": self.username,
35
+ "Signature": signature,
36
+ "Password": self.password,
37
+ "ClientId": self.client_id
38
+ }),
39
+ timeout=self.timeout)
40
+ if self.debug:
41
+ print(response.content)
42
+ response.raise_for_status()
43
+ self.token = response.json()['Token']
44
+ self.refresh_token = response.json()['RefreshToken']
45
+
46
+ def _get_token(self):
47
+ signature = hashlib.sha1(f"{self.refresh_token}{self.secret_id}".encode()).hexdigest()
48
+ response = requests.post(url=f"{self.url}refreshtoken",
49
+ headers=self.content_type_header,
50
+ data=json.dumps({
51
+ "RefreshToken": self.refresh_token,
52
+ "Signature": signature
53
+ }),
54
+ timeout=self.timeout)
55
+ if self.debug:
56
+ print(response.content)
57
+ response.raise_for_status()
58
+ self.token = response.json()['Token']
59
+ self.refresh_token = response.json()['RefreshToken']
60
+
61
+ def _get_headers_allsol(self):
62
+ if self.token is None:
63
+ self._get_refreshtoken()
64
+ else:
65
+ self._get_token()
66
+ headers = {**self.content_type_header, **{'Authorization': f'{self.token}'}}
67
+
68
+ return headers
69
+
70
+ # Get functions
71
+ def get_employees(self, filter: Optional[str] = None):
72
+ self._get_headers_allsol()  # obtain a token up front; each paginated request below refreshes it again
73
+ total_response = []
74
+ more_results = True
75
+ params = {"pageSize": 500}
76
+ if filter: params.update({self.filter_freeform_string: filter})
77
+ while more_results:
78
+ response = requests.get(url=f"{self.url}mperso",
79
+ headers=self._get_headers_allsol(),
80
+ params=params,
81
+ timeout=self.timeout)
82
+ if self.debug:
83
+ print(response.content)
84
+ response.raise_for_status()
85
+ more_results = response.json()['Paging']['More']
86
+ params['cursor'] = response.json()['Paging']['NextCursor']
87
+ total_response += response.json()['Data']
88
+
89
+ return total_response
90
+
91
+ def extract_employees_allsolutions_dataframe(self, column_map: dict) -> pd.DataFrame:
92
+ """
93
+ Extract employee data from All Solutions, rename the columns via column_map and keep only the mapped columns
94
+ :return: DataFrame with All Solutions employee data
95
+ """
96
+ resp = self.get_employees()
97
+ df_employees = pd.DataFrame(resp)
98
+ df_employees['name_use_code'] = df_employees['ab02.naamstelling'].apply(lambda x: x.get('id'))
99
+ df_employees['name_use_description'] = df_employees['ab02.naamstelling'].apply(lambda x: x.get('desc'))
100
+ df_employees.rename(mapper=column_map, axis=1, inplace=True, errors='ignore')
101
+ df_employees = df_employees[column_map.values()]
102
+ return df_employees
103
+
104
+ def get_sickleave(self, filter: Optional[str] = None):
105
+ self._get_headers_allsol()  # obtain a token up front; each paginated request below refreshes it again
106
+ total_response = []
107
+ more_results = True
108
+ params = {"pageSize": 500}
109
+ if filter:
110
+ params.update({self.filter_freeform_string: filter})
111
+
112
+ while more_results:
113
+ response = requests.get(url=f"{self.url}mzktml",
114
+ headers=self._get_headers_allsol(),
115
+ params=params,
116
+ timeout=self.timeout)
117
+ if self.debug:
118
+ print(response.content)
119
+ response.raise_for_status()
120
+ data = response.json()
121
+ more_results = data['Paging']['More']
122
+ params['cursor'] = data['Paging']['NextCursor']
123
+ total_response += data['Data']
124
+
125
+ # Now make an additional call to retrieve the partial absence percentage
126
+ for entry in total_response:
127
+ sickleave_id = entry.get('Id')
128
+ if sickleave_id:
129
+ partial_response = requests.get(
130
+ url=f"{self.url}mzktml/{sickleave_id}/partieelverzuim",
131
+ headers=self._get_headers_allsol(),
132
+ timeout=self.timeout
133
+ )
134
+ partial_response.raise_for_status()
135
+ partial_data = partial_response.json().get('Data', [])
136
+ if partial_data:
137
+ entry['percentage'] = partial_data[0].get('ap47.prc', None)
138
+
139
+ return total_response
140
+
141
+ def get_detailed_sickleave(self, filter: Optional[str] = None):
142
+ self._get_headers_allsol()  # obtain a token up front; each paginated request below refreshes it again
143
+ total_response = []
144
+ more_results = True
145
+ params = {"pageSize": 500}
146
+ if filter:
147
+ params.update({self.filter_freeform_string: filter})
148
+
149
+ while more_results:
150
+ response = requests.get(
151
+ url=f"{self.url}mzktml",
152
+ headers=self._get_headers_allsol(),
153
+ params=params, timeout=self.timeout
154
+ )
155
+ if self.debug:
156
+ print(response.content)
157
+ response.raise_for_status()
158
+ data = response.json()
159
+ more_results = data['Paging']['More']
160
+ params['cursor'] = data['Paging']['NextCursor']
161
+ total_response += data['Data']
162
+
163
+ detailed_response = []
164
+
165
+ # Iterate over each sick leave entry
166
+ for entry in total_response:
167
+ sickleave_id = entry.get('Id')
168
+ employee_code = entry.get('ap46.persnr') # Adjust the key as per actual data
169
+ search_name = entry.get('ab02.zoeknaam') # Adjust the key as per actual data
170
+ sickleave_start_date = entry.get('ap46.ziektedat')
171
+ sickleave_end_date = entry.get('ap46.dat-hervat-arbo')
172
+
173
+ if sickleave_id:
174
+ partial_response = requests.get(
175
+ url=f"{self.url}mzktml/{sickleave_id}/partieelverzuim",
176
+ headers=self._get_headers_allsol(), timeout=self.timeout
177
+ )
178
+ partial_response.raise_for_status()
179
+ partial_data = partial_response.json().get('Data', [])
180
+
181
+ # Iterate over each partial sick leave entry
182
+ for partial_entry in partial_data:
183
+ partial_sickleave_id = partial_entry.get('Id')
184
+ partial_start_date = partial_entry.get('ap47.ingangsdat')
185
+ partial_end_date = partial_entry.get('h-einddat')
186
+ percentage = partial_entry.get('ap47.prc')
187
+
188
+ detailed_response.append({
189
+ 'search_name': search_name,
190
+ 'employee_code': employee_code,
191
+ 'sickleave_id': sickleave_id,
192
+ 'start_date': sickleave_start_date,
193
+ 'end_date': sickleave_end_date,
194
+ 'partial_sickleave_id': partial_sickleave_id,
195
+ 'partial_start_date': partial_start_date,
196
+ 'partial_end_date': partial_end_date,
197
+ 'percentage': percentage
198
+ })
199
+
200
+ return detailed_response
201
+
202
+ def get_persons(self, filter: Optional[str] = None):
203
+ total_response = []
204
+ more_results = True
205
+ params = {"pageSize": 500}
206
+ if filter: params.update({self.filter_freeform_string: filter})
207
+ while more_results:
208
+ response = requests.get(url=f"{self.url}mrlprs",
209
+ headers=self._get_headers_allsol(),
210
+ params=params,
211
+ timeout=self.timeout)
212
+ if self.debug:
213
+ print(response.content)
214
+ response.raise_for_status()
215
+ more_results = response.json()['Paging']['More']
216
+ params['cursor'] = response.json()['Paging']['NextCursor']
217
+ total_response += response.json()['Data']
218
+
219
+ return total_response
220
+
221
+ def get_contracts(self, filter: Optional[str] = None):
222
+ total_response = []
223
+ more_results = True
224
+ params = {"pageSize": 500}
225
+ if filter:
226
+ params.update({self.filter_freeform_string: filter})
227
+
228
+ while more_results:
229
+ response = requests.get(url=f"{self.url}/mappar", # Adjusted the endpoint
230
+ headers=self._get_headers_allsol(),
231
+ params=params,
232
+ timeout=self.timeout)
233
+ if self.debug:
234
+ print(response.content)
235
+
236
+ response.raise_for_status()
237
+
238
+ response_data = response.json()
239
+ more_results = response_data.get('Paging', {}).get('More', False)
240
+ next_cursor = response_data.get('Paging', {}).get('NextCursor')
241
+
242
+ if next_cursor:
243
+ params['cursor'] = next_cursor
244
+ else:
245
+ more_results = False
246
+
247
+ total_response += response_data.get('Data', [])
248
+
249
+ return total_response
250
+
251
+ def get_contract(self, employee_id: str, filter: Optional[str] = None):
252
+ total_response = []
253
+ more_results = True
254
+ params = {"pageSize": 500}
255
+ if filter: params.update({self.filter_freeform_string: filter})
256
+ while more_results:
257
+ response = requests.get(url=f"{self.url}mperso/{employee_id}/arbeidsovereenkomsten",
258
+ headers=self._get_headers_allsol(),
259
+ params=params,
260
+ timeout=self.timeout)
261
+ if self.debug:
262
+ print(response.content)
263
+ response.raise_for_status()
264
+ more_results = response.json()['Paging']['More']
265
+ params['cursor'] = response.json()['Paging']['NextCursor']
266
+ total_response += response.json()['Data']
267
+
268
+ return total_response
269
+
270
+ def get_hours(self, employee_id: str, filter: Optional[str] = None):
271
+ total_response = []
272
+ more_results = True
273
+ params = {"pageSize": 500}
274
+ if filter: params.update({self.filter_freeform_string: filter})
275
+ while more_results:
276
+ response = requests.get(url=f"{self.url}mperso/{employee_id}/werktijden2wk",
277
+ headers=self._get_headers_allsol(),
278
+ params=params,
279
+ timeout=self.timeout)
280
+ response.raise_for_status()
281
+ more_results = response.json()['Paging']['More']
282
+ params['cursor'] = response.json()['Paging']['NextCursor']
283
+ total_response += response.json()['Data']
284
+
285
+ return total_response
286
+
287
+ def get_managers(self, employee_id: str, filter: Optional[str] = None):
288
+ total_response = []
289
+ more_results = True
290
+ params = {"pageSize": 500}
291
+ if filter: params.update({self.filter_freeform_string: filter})
292
+ while more_results:
293
+ response = requests.get(url=f"{self.url}mperso/{employee_id}/manager",
294
+ headers=self._get_headers_allsol(),
295
+ params=params,
296
+ timeout=self.timeout)
297
+ response.raise_for_status()
298
+ more_results = response.json()['Paging']['More']
299
+ params['cursor'] = response.json()['Paging']['NextCursor']
300
+ total_response += response.json()['Data']
301
+
302
+ return total_response
303
+
304
+ def get_functions(self, employee_id: str, filter: Optional[str] = None):
305
+ total_response = []
306
+ more_results = True
307
+ params = {"pageSize": 500}
308
+ if filter: params.update({self.filter_freeform_string: filter})
309
+ while more_results:
310
+ response = requests.get(url=f"{self.url}mperso/{employee_id}/functies",
311
+ headers=self._get_headers_allsol(),
312
+ params=params,
313
+ timeout=self.timeout)
314
+ response.raise_for_status()
315
+ more_results = response.json()['Paging']['More']
316
+ params['cursor'] = response.json()['Paging']['NextCursor']
317
+ total_response += response.json()['Data']
318
+
319
+ return total_response
320
+
321
+ def get_costcenters(self, employee_id: str, filter: Optional[str] = None):
322
+ total_response = []
323
+ more_results = True
324
+ params = {"pageSize": 500}
325
+ if filter: params.update({self.filter_freeform_string: filter})
326
+ while more_results:
327
+ headers = self._get_headers_allsol()
328
+ response = requests.get(url=f"{self.url}mperso/{employee_id}/thuisafdelingen",
329
+ headers=headers,
330
+ params=params,
331
+ timeout=self.timeout)
332
+ response.raise_for_status()
333
+ more_results = response.json()['Paging']['More']
334
+ params['cursor'] = response.json()['Paging']['NextCursor']
335
+ total_response += response.json()['Data']
336
+
337
+ return total_response
338
+
339
+ # Post functions
340
+ def create_employee(self, data: dict) -> requests.Response:
341
+ """
342
+ Create a new employee in All Solutions
343
+ :param data: all the fields that are required to create a new employee
344
+ :return: response json
345
+ """
346
+ required_fields = ["employee_code", "employee_id_afas", "date_in_service", "email_work", "costcenter", "search_name", "function", "person_id", "hours_week", "employment", 'parttime_factor']
347
+ allowed_fields = {
348
+ "note": "ab02.notitie-edit",
349
+ "birth_date": "ab02.geb-dat",
350
+ "email_private": "ab02.email",
351
+ 'employment': "ab02.srt-mdw",
352
+ "phone_work": "ab02.telefoon-int",
353
+ "mobile_phone_work": "ab02.mobiel-int",
354
+ "contract_end_date": "ab02.einddat-contract",
355
+ "nickname": "ab02.roepnaam",
356
+ "costcenter": "ab02.ba-kd",
357
+ "function": "ab02.funktie",
358
+ "manager_employee_code": "ab02.manager",
359
+ "name_use": "ab02.naamstelling",
360
+ "parttime_factor": "h-dt-factor-afas"
361
+ }
362
+ self.__check_fields(data=data, required_fields=required_fields)
363
+
364
+ payload = {
365
+ "Data": [
366
+ {
367
+ "ab02.persnr": data['employee_code'],
368
+ "ab02.kenmerk[113]": data['employee_id_afas'],
369
+ "ab02.zoeknaam": data['search_name'],
370
+ "ab02.indat": data['date_in_service'],
371
+ "ab02.email-int": data['email_work'],
372
+ "ab02.ba-kd": data['costcenter'],
373
+ "ab02.funktie": data['function'],
374
+ "ab02.srt-mdw": data["employment"],
375
+ "h-aanw": data['hours_week'],
376
+ "h-aanw2": data['hours_week'],
377
+ "h-default7": True,
378
+ "ab02.contr-srt-kd": "1",
379
+ "ab02.notitie-edit": "Afas koppeling"
380
+ }
381
+ ]
382
+ }
383
+ if 'contract_end_date' in data:
384
+ # when a contract end date is supplied, also register it as the out-of-service date (ab02.uitdat)
385
+ payload['Data'][0].update({"ab02.uitdat": data['contract_end_date']})
386
+ # Add allowed fields to the body
387
+ for field in (allowed_fields.keys() & data.keys()):
388
+ payload['Data'][0].update({allowed_fields[field]: data[field]})
389
+
390
+ url = f'{self.url}mrlprs/{data["person_id"]}/medewerkergegevens'
391
+
392
+
393
+ response = requests.post(url=url,
394
+ headers=self._get_headers_allsol(),
395
+ data=json.dumps(payload), timeout=self.timeout)
396
+ if self.debug:
397
+ print(response.content)
398
+ print(payload)
399
+ response.raise_for_status()
400
+
401
+ return response
402
+
403
+ def create_person(self, data: dict) -> requests.Response:
404
+ """
405
+ Create a new person in All Solutions
406
+ :param data: data of the person
407
+ :return: response json
408
+ """
409
+ required_fields = ["search_name", "employee_id_afas", "employee_code", "birth_date", "initials", "city", "lastname",
410
+ "street", "housenumber", "postal_code"]
411
+ allowed_fields = {
412
+ "note": "ma01.notitie-edit",
413
+ "prefix": "ma01.voor[1]",
414
+ 'firstname': "ma01.voornaam",
415
+ 'gender': "ma01.geslacht",
416
+ # "mobile_phone_private": "ma01.mobiel",
417
+ # "email_private": "ma01.email",
418
+ # "phone_private": "ma01.telefoon",
419
+ "prefix_birthname": "ma01.voor[2]",
420
+ "housenumber_addition": "ma01.b-appendix",
421
+ 'country': "ma01.b-land-kd",
422
+ "birthname": "ma01.persoon[2]",
423
+ }
424
+ self.__check_fields(data=data, required_fields=required_fields)
425
+
426
+ payload = {
427
+ "Data": [
428
+ {
429
+ "ma01.zoeknaam": data['search_name'],
430
+ 'ma01.kenmerk[43]': data['employee_id_afas'],
431
+ "ma01.persnr": data['employee_code'],
432
+ "ma01.geb-dat": data['birth_date'],
433
+ "ma01.voorl": data['initials'],
434
+ "ma01.roepnaam": data['nickname'],
435
+ "ma01.b-wpl": data['city'],
436
+ "ma01.persoon[1]": data['lastname'],
437
+ "ma01.b-adres": data['street'],
438
+ "ma01.b-num": data['housenumber'],
439
+ "ma01.b-pttkd": data['postal_code'],
440
+ "h-default6": True,
441
+ "h-default8": True,
442
+ "ma01.rel-grp": 'Medr',
443
+ "h-chk-ma01": False # Check if person already exists
444
+ }
445
+ ]
446
+ }
447
+
448
+ # Add allowed fields to the body
449
+ for field in (allowed_fields.keys() & data.keys()):
450
+ payload['Data'][0].update({allowed_fields[field]: data[field]})
451
+
452
+ response = requests.post(url=f"{self.url}mrlprs",
453
+ headers=self._get_headers_allsol(),
454
+ data=json.dumps(payload),
455
+ timeout=self.timeout)
456
+ # if self.debug:
457
+ # print("______________________payload______________________")
458
+ # print(payload)
459
+ # print("______________________response______________________")
460
+ # print(response.content)
461
+ response.raise_for_status()
462
+
463
+ return response
464
+
465
+ def create_timetable(self, data: dict) -> requests.Response:
466
+ """
467
+ Create a new timetable (working hours) record in All Solutions
468
+ :param data: data to update
469
+ :return: json response
470
+ """
471
+ required_fields = ['employee_id', 'parttime_factor', 'start_date', 'hours_per_week']
472
+
473
+ self.__check_fields(data=data, required_fields=required_fields)
474
+ rounded_parttime_factor = round(data['parttime_factor'], 4)
475
+ # hours_per_week = 38 * rounded_parttime_factor
476
+ # make sure the hours per week are rounded to 2 decimals
477
+ # hours_per_week = round(hours_per_week, 2)
478
+
479
+ payload = {
480
+ "Data": [
481
+ {
482
+ "ap23.datum": data['start_date'],
483
+ "h-aanw": data['hours_per_week'],
484
+ "h-default1": True,
485
+ "h-aanw2": data['hours_per_week'],
486
+ "h-default2": True,
487
+ "ap23.dt-factor-afas": rounded_partime_factor,
488
+ "h-dt-factor-2wk": rounded_partime_factor
489
+
490
+ }
491
+ ]
492
+ }
493
+ if self.debug:
494
+ print('new timetable')
495
+ print(payload)
496
+ print(data.get('employee_id_afas'))
497
+ response = requests.post(url=f"{self.url}mperso/{data['employee_id']}/werktijden2wk",
498
+ headers=self._get_headers_allsol(),
499
+ data=json.dumps(payload),
500
+ timeout=self.timeout)
501
+ response.raise_for_status()
502
+ return response
503
+
504
+ def create_contract(self, data: dict) -> requests.Response:
505
+ """
506
+ Create a new employment contract in All Solutions
507
+ :param data: data to update
508
+ :return: json response
509
+ """
510
+ required_fields = ['employee_id', 'tracking_number', 'costcenter', 'function', 'hours_per_week', 'parttime_factor']
511
+ allowed_fields = {
512
+ "contract_start_date": "ap11.indat",
513
+ "contract_end_date": "ap11.einddat-contract",
514
+ "employee_type": "ab02.srt-mdw",
515
+ "employment": "ab02.srt-mdw"
516
+ }
517
+
518
+ self.__check_fields(data=data, required_fields=required_fields)
519
+
520
+ payload = {
521
+ "Data": [
522
+ {
523
+ "ap11.vlgnr": data['tracking_number'],
524
+ "ab02.ba-kd": data['costcenter'],
525
+ "ab02.funktie": data['function'],
526
+ "h-aanw": data['hours_per_week'],
527
+ "h-aanw2": data['hours_per_week'],
528
+ "h-default7": True,
529
+ "h-dt-factor-afas": data['parttime_factor']
530
+ }
531
+ ]
532
+ }
533
+ # add the uitdat field if it is present in the data
534
+ if 'contract_end_date' in data:
535
+ payload['Data'][0].update({"ap11.uitdat": data['contract_end_date']})
536
+
537
+ # Add allowed fields to the body
538
+ for field in (allowed_fields.keys() & data.keys()):
539
+ payload['Data'][0].update({allowed_fields[field]: data[field]})
540
+
541
+ response = requests.post(url=f"{self.url}mperso/{data['employee_id']}/arbeidsovereenkomsten",
542
+ headers=self._get_headers_allsol(),
543
+ data=json.dumps(payload),
544
+ timeout=self.timeout)
545
+ if self.debug:
546
+ print(response.content)
547
+ print(payload)
548
+ response.raise_for_status()
549
+
550
+ return response
551
+
552
+ def create_costcenter(self, data: dict) -> requests.Response:
553
+ """
554
+ Create a new cost center assignment in All Solutions
555
+ :param data: data to update
556
+ :return: json response
557
+ """
558
+ required_fields = ['employee_id', 'start_year', 'start_week', 'costcenter']
559
+ self.__check_fields(data=data, required_fields=required_fields)
560
+
561
+ payload = {
562
+ "Data": [
563
+ {
564
+ "ab09.jaar": data['start_year'],
565
+ "ab09.periode": data['start_week'],
566
+ "ab09.ba-kd": data['costcenter']
567
+ }
568
+ ]
569
+ }
570
+
571
+ response = requests.post(url=f"{self.url}mperso/{data['employee_id']}/thuisafdelingen",
572
+ headers=self._get_headers_allsol(),
573
+ data=json.dumps(payload),
574
+ timeout=self.timeout)
575
+ if self.debug:
576
+ print(response.content)
577
+ print(payload)
578
+ response.raise_for_status()
579
+
580
+ return response
581
+
582
+ def create_function(self, data: dict) -> requests.Response:
583
+ """
584
+ Create a new function assignment in All Solutions
585
+ :param data: data to update
586
+ :return: json response
587
+ """
588
+ required_fields = ['employee_id', 'start_year', 'start_week', 'function']
589
+ self.__check_fields(data=data, required_fields=required_fields)
590
+
591
+ allowed_fields = {
592
+ "end_year": "ab13.tot-jaar",
593
+ "end_week": "ab13.tot-week"
594
+ }
595
+
596
+ payload = {
597
+ "Data": [
598
+ {
599
+ "ab13.jaar": data['start_year'],
600
+ "ab13.week": data['start_week'],
601
+ "ab13.funktie": data['function']
602
+ }
603
+ ]
604
+ }
605
+
606
+ # Add allowed fields to the body
607
+ for field in (allowed_fields.keys() & data.keys()):
608
+ payload['Data'][0].update({allowed_fields[field]: data[field]})
609
+
610
+ response = requests.post(url=f"{self.url}mperso/{data['employee_id']}/functies",
611
+ headers=self._get_headers_allsol(),
612
+ data=json.dumps(payload),
613
+ timeout=self.timeout)
614
+ if self.debug:
615
+ print(response.content)
616
+ print(payload)
617
+ response.raise_for_status()
618
+
619
+ return response
620
+
621
+ def create_sickleave(self, data):
622
+ """
623
+ Create a new sick leave record in All Solutions
624
+ :param data: data to update
625
+ :return: json response
626
+ """
627
+ required_fields = ['employee_code', 'start_date', 'activity_code', 'sickleave_code_afas']
628
+
629
+ allowed_fields = {'end_date': "ap46.dat-hervat-arbo"}
630
+
631
+ self.__check_fields(data=data, required_fields=required_fields)
632
+ payload = {
633
+ "Data": [
634
+ {
635
+ "ap46.persnr": data['employee_code'],
636
+ "ap46.aktkd": data['activity_code'],
637
+ "ap46.ziektedat": data['start_date'],
638
+ # "ap46.dat-meld-arbo": data['start_date'],
639
+ "ap46.opm": f"Afas koppeling {data['sickleave_code_afas']}"
640
+
641
+ }
642
+ ]
643
+ }
644
+ # Add allowed fields to the body
645
+ for field in (allowed_fields.keys() & data.keys()):
646
+ payload['Data'][0].update({allowed_fields[field]: data[field]})
647
+ url = f"{self.url}mzktml"
648
+
649
+ if self.debug:
650
+ print('sickleave')
651
+ print(url)
652
+ print(payload)
653
+
654
+ response = requests.post(url=url,
655
+ headers=self._get_headers_allsol(),
656
+ data=json.dumps(payload),
657
+ timeout=self.timeout)
658
+ return response
659
+
660
+ def create_partial_sickleave(self, data):
661
+ """
662
+ Create a partial sick leave record for a sick leave entry in All Solutions.
663
+ :param data: Data to upload in the request body (the entire row as a dictionary)
664
+ :return: JSON response
665
+ """
666
+ # Required fields that must be present in the entry
667
+ required_fields = ['partieel_verzuim_start_datum', 'percentage', 'sickleave_id']
668
+
669
+ # Check required fields in the data
670
+ self.__check_fields(data=data, required_fields=required_fields)
671
+
672
+ # Map your data fields to the API's expected field names
673
+ api_field_mapping = {
674
+ 'partieel_verzuim_start_datum': 'ap47.ingangsdat',
675
+ 'percentage': 'ap47.prc',
676
+ 'sickleave_code_afas': 'ap47.opm'
677
+ }
678
+
679
+ # Construct the payload for the entry
680
+ payload_entry = {}
681
+ for field, api_field in api_field_mapping.items():
682
+ if field in data and pd.notna(data[field]) and data[field] != '':
683
+ payload_entry[api_field] = data[field]
684
+
685
+ payload = {
686
+ "Data": [payload_entry]
687
+ }
688
+
689
+ # Construct the URL using the sickleave_id from data
690
+ sickleave_id = data['sickleave_id']
691
+ url = f"/mzktml/{sickleave_id}/partieelverzuim"
692
+
693
+ # Make the POST request to the given URL
694
+ response = requests.post(
695
+ url=f"{self.url}{url}",
696
+ headers=self._get_headers_allsol(),
697
+ data=json.dumps(payload), timeout=self.timeout
698
+ )
699
+ # Return the response (JSON)
700
+ return response
701
+
702
+ def update_partial_sickleave(self, data):
703
+ """
704
+ Update a partial sick leave record for a sick leave entry in All Solutions.
705
+ :param data: Data to upload in the request body (the entire row as a dictionary)
706
+ :return: JSON response
707
+ """
708
+ # Required fields that must be present in the entry
709
+ required_fields = ['partieel_verzuim_start_datum', 'percentage', 'sickleave_id', 'partial_sickleave_id']
710
+
711
+ # Check required fields in the data
712
+ self.__check_fields(data=data, required_fields=required_fields)
713
+
714
+ # Map your data fields to the API's expected field names
715
+ api_field_mapping = {
716
+ 'partieel_verzuim_start_datum': 'ap47.ingangsdat',
717
+ 'percentage': 'ap47.prc',
718
+ 'remarks': 'ap47.opm'
719
+ }
720
+
721
+ # Construct the payload for the entry
722
+ payload_entry = {}
723
+ for field, api_field in api_field_mapping.items():
724
+ if field in data and pd.notna(data[field]) and data[field] != '':
725
+ payload_entry[api_field] = data[field]
726
+
727
+ payload = {
728
+ "Data": [payload_entry]
729
+ }
730
+
731
+ # Construct the URL using the sickleave_id from data
732
+ url = f"/mzktml/{data['sickleave_id']}/partieelverzuim/{data['partial_sickleave_id']}"
733
+
734
+ if self.debug:
735
+ print('partial sickleave')
736
+ print(url)
737
+ print(payload)
738
+
739
+ # Make the PUT request to the given URL
740
+ response = requests.put(
741
+ url=f"{self.url}{url}",
742
+ headers=self._get_headers_allsol(),
743
+ data=json.dumps(payload),
744
+ timeout=self.timeout
745
+ )
746
+ # Return the response (JSON)
747
+ return response
748
+
749
+ def create_manager(self, data: dict) -> requests.Response:
750
+ """
751
+ :param data: data to update
752
+ :return: json response
753
+ """
754
+ required_fields = ['employee_id', 'manager_employee_code', 'year', 'week']
755
+ allowed_fields = {"year_to": "ap15.tot-jaar", "week_to": "ap15.tot-week"}
756
+ self.__check_fields(data=data, required_fields=required_fields)
757
+
758
+ payload = {"Data": [{"ap15.jaar": data['year'], "ap15.week": data['week'], "ap15.manager": data['manager_employee_code']}]}
759
+
760
+ # Add allowed fields to the body
761
+ for field in (allowed_fields.keys() & data.keys()):
762
+ payload['Data'][0].update({allowed_fields[field]: data[field]})
763
+
764
+ response = requests.post(url=f"{self.url}mperso/{data['employee_id']}/manager", headers=self._get_headers_allsol(), data=json.dumps(payload), timeout=self.timeout)
765
+ if self.debug:
766
+ print(response.content)
767
+ print(payload)
768
+ response.raise_for_status()
769
+
770
+ return response
771
+
772
+ # Put functions
773
+ def update_timetable(self, data: dict) -> requests.Response:
774
+ """
775
+ Update an existing timetable (working hours) record in All Solutions
776
+ :param data: data to update
777
+ :return: json response
778
+ """
779
+ required_fields = ['employee_id', 'hours_per_week', 'start_date', 'timetable_id', 'parttime_factor']
780
+
781
+ rounded_parttime_factor = round(data['parttime_factor'], 4)
782
+
783
+ self.__check_fields(data=data, required_fields=required_fields)
784
+ payload = {
785
+ "Data": [
786
+ {
787
+ "ap23.datum": data['start_date'],
788
+ "h-aanw": data['hours_per_week'],
789
+ "h-default1": True,
790
+ "h-aanw2": data['hours_per_week'],
791
+ "h-default2": True,
792
+ "ap23.dt-factor-afas": rounded_partime_factor
793
+
794
+ }
795
+ ]
796
+ }
797
+ if self.debug:
798
+ print('edit')
799
+ print(payload)
800
+ print(data.get('employee_id_afas'))
801
+ response = requests.put(url=f"{self.url}mperso/{data['employee_id']}/werktijden2wk/{data['timetable_id']}",
802
+ headers=self._get_headers_allsol(),
803
+ data=json.dumps(payload),
804
+ timeout=self.timeout)
805
+ response.raise_for_status()
806
+ return response
807
+
808
+ def update_contract(self, data: dict) -> requests.Response:
809
+
810
+ """
811
+ Update an employment contract in All Solutions
812
+ :param data: data to update
813
+ :return: json response
814
+ """
815
+ required_fields = ['employee_id', 'contract_id', 'tracking_number']
816
+ allowed_fields = {
817
+ "contract_end_date": "ap11.einddat-contract"
818
+ }
819
+
820
+ self.__check_fields(data=data, required_fields=required_fields)
821
+
822
+ payload = {
823
+ "Data": [
824
+ {
825
+ "ap11.vlgnr": data['tracking_number']
826
+ }
827
+ ]
828
+ }
829
+
830
+ # Add allowed fields to the body
831
+ for field in (allowed_fields.keys() & data.keys()):
832
+ payload['Data'][0].update({allowed_fields[field]: data[field]})
833
+
834
+ response = requests.put(url=f"{self.url}mperso/{data['employee_id']}/arbeidsovereenkomsten/{data['contract_id']}",
835
+ headers=self._get_headers_allsol(),
836
+ data=json.dumps(payload),
837
+ timeout=self.timeout)
838
+ if self.debug:
839
+ print(response.content)
840
+ print(payload)
841
+ response.raise_for_status()
842
+
843
+ return response
844
+
845
+ def update_employee(self, data: dict) -> requests.Response:
846
+ """
847
+ Update an existing employee in All Solutions
848
+ :param data: data to update
849
+ :return:
850
+ """
851
+ required_fields = ['employee_id']
852
+ allowed_fields = {
853
+ 'employee_code': 'ab02.persnr',
854
+ 'birth_date': 'ab02.geb-dat',
855
+ 'employee_id_afas': "ab02.kenmerk[113]",
856
+ 'date_in_service': 'ab02.indat',
857
+ 'date_in_service_custom': 'ab02.kenmerk[62]',
858
+ 'termination_date': 'ab02.uitdat',
859
+ 'email_work': 'ab02.email-int',
860
+ 'email_private': 'ab02.email',
861
+ 'phone_work': 'ab02.telefoon-int',
862
+ 'mobile_phone_work': 'ab02.mobiel-int',
863
+ 'note': "ab02.notitie-edit",
864
+ 'employment': "ab02.srt-mdw",
865
+ "nickname": "ab02.roepnaam",
866
+ "name_use": "ab02.naamstelling"
867
+ }
868
+
869
+ self.__check_fields(data=data, required_fields=required_fields)
870
+
871
+ payload = {
872
+ "Data": [
873
+ {
874
+ "h-default7": True,
875
+ "h-default6": True, # Find corresponding employee details
876
+ "h-default5": True, # Find name automatically
877
+ "h-corr-adres": True, # save address as correspondence address
878
+ "ab02.contr-srt-kd": "1"
879
+ }
880
+ ]
881
+ }
882
+
883
+ # Add allowed fields to the body
884
+ for field in (allowed_fields.keys() & data.keys()):
885
+ payload['Data'][0].update({allowed_fields[field]: data[field]})
886
+
887
+ response = requests.put(url=f"{self.url}mperso/{data['employee_id']}",
888
+ headers=self._get_headers_allsol(),
889
+ data=json.dumps(payload),
890
+ timeout=self.timeout)
891
+ if self.debug:
892
+ print(response.content)
893
+ print(payload)
894
+ response.raise_for_status()
895
+
896
+ return response
897
+
898
+ def update_person(self, data: dict) -> requests.Response:
899
+ """
900
+ Update person in all solutions
901
+ :param data: data to update
902
+ :return: json response
903
+ """
904
+ required_fields = ['person_id']
905
+ allowed_fields = {
906
+ "search_name": "ma01.zoeknaam",
907
+ "employee_id_afas": "ma01.mail-nr",
908
+ "employee_code": "ma01.persnr",
909
+ "birth_date": "ma01.geb-dat",
910
+ "initials": "ma01.voorl",
911
+ "firstname": "ma01.voornaam",
912
+ "nickname": "ma01.roepnaam",
913
+ "prefix": "ma01.voor[1]",
914
+ "prefix_partner": "ma01.voor[2]",
915
+ "city": "ma01.b-wpl",
916
+ "birth_name": "ma01.persoon[1]",
917
+ "lastname_partner": "ma01.persoon[2]",
918
+ "street": "ma01.b-adres",
919
+ "housenumber": "ma01.b-num",
920
+ "housenumber_addition": "ma01.b-appendix",
921
+ "postal_code": "ma01.b-pttkd",
922
+ "note": "ma01.notitie-edit",
923
+ 'gender': "ma01.geslacht",
924
+ 'country': "ma01.b-land-kd",
925
+ }
926
+
927
+ self.__check_fields(data=data, required_fields=required_fields)
928
+
929
+ payload = {
930
+ "Data": [
931
+ {
932
+ "h-default6": True,
933
+ "h-default8": True,
934
+ "ma01.rel-grp": 'Medr'
935
+ }
936
+ ]
937
+ }
938
+
939
+ # Add allowed fields to the body
940
+ for field in (allowed_fields.keys() & data.keys()):
941
+ payload['Data'][0].update({allowed_fields[field]: data[field]})
942
+
943
+ response = requests.put(url=f"{self.url}mrlprs/{data['person_id']}",
944
+ headers=self._get_headers_allsol(),
945
+ data=json.dumps(payload),
946
+ timeout=self.timeout)
947
+ if self.debug:
948
+ print(response.content)
949
+ print(payload)
950
+
951
+ response.raise_for_status()
952
+
953
+ return response
954
+
955
+ def update_costcenter(self, data: dict) -> requests.Response:
956
+ """
957
+ Update a cost center assignment in All Solutions
958
+ :param data: data to update
959
+ :return: json response
960
+ """
961
+ required_fields = ['employee_id', 'start_year', 'start_week', 'costcenter', 'costcenter_id']
962
+ self.__check_fields(data=data, required_fields=required_fields)
963
+
964
+ allowed_fields = {
965
+ "end_year": "ab09.tot-jaar",
966
+ "end_week": "ab09.tot-per"
967
+ }
968
+
969
+ payload = {
970
+ "Data": [
971
+ {
972
+ "ab09.jaar": data['start_year'],
973
+ "ab09.periode": data['start_week'],
974
+ "ab09.ba-kd": data['costcenter']
975
+ }
976
+ ]
977
+ }
978
+ # Add allowed fields to the body
979
+ for field in (allowed_fields.keys() & data.keys()):
980
+ payload['Data'][0].update({allowed_fields[field]: data[field]})
981
+
982
+ response = requests.put(url=f"{self.url}mperso/{data['employee_id']}/thuisafdelingen/{data['costcenter_id']}",
983
+ headers=self._get_headers_allsol(),
984
+ data=json.dumps(payload),
985
+ timeout=self.timeout)
986
+ if self.debug:
987
+ print(response.content)
988
+ print(payload)
989
+ response.raise_for_status()
990
+
991
+ return response
992
+
993
+ def update_function(self, data: dict) -> requests.Response:
994
+ """
995
+ Update a function assignment in All Solutions
996
+ :param data: data to update
997
+ :return: json response
998
+ """
999
+ required_fields = ['employee_id', 'start_year', 'start_week', 'function', 'function_id']
1000
+ self.__check_fields(data=data, required_fields=required_fields)
1001
+
1002
+ allowed_fields = {
1003
+ "end_year": "ab13.tot-jaar",
1004
+ "end_week": "ab13.tot-week"
1005
+ }
1006
+ payload = {
1007
+ "Data": [
1008
+ {
1009
+ "ab13.jaar": data['start_year'],
1010
+ "ab13.week": data['start_week'],
1011
+ "ab13.funktie": data['function']
1012
+ }
1013
+ ]
1014
+ }
1015
+
1016
+ # Add allowed fields to the body
1017
+ for field in (allowed_fields.keys() & data.keys()):
1018
+ payload['Data'][0].update({allowed_fields[field]: data[field]})
1019
+
1020
+ response = requests.put(url=f"{self.url}mperso/{data['employee_id']}/functies/{data['function_id']}",
1021
+ headers=self._get_headers_allsol(),
1022
+ data=json.dumps(payload),
1023
+ timeout=self.timeout)
1024
+ if self.debug:
1025
+ print(response.content)
1026
+ print(payload)
1027
+ response.raise_for_status()
1028
+
1029
+ return response
1030
+
1031
+ def update_worked_hours(self, data: dict) -> dict:
1032
+ """
1033
+ :param data: data to update
1034
+ :return: json response
1035
+ """
1036
+ required_fields = ['employee_id', 'id', 'hours']
1037
+ self.__check_fields(data=data, required_fields=required_fields)
1038
+
1039
+ payload = {
1040
+ "Data": [
1041
+ {
1042
+ "h-aanw": data['hours']
1043
+ }
1044
+ ]
1045
+ }
1046
+
1047
+ if self.debug:
1048
+ print(json.dumps(payload))
1049
+ response = requests.post(url=f"{self.url}mperso/{data['employee_id']}/Tijdelijkewerktijden",
1050
+ headers=self._get_headers_allsol(),
1051
+ data=json.dumps(payload),
1052
+ timeout=self.timeout)
1053
+ if self.debug:
1054
+ print(response.content)
1055
+ response.raise_for_status()
1056
+
1057
+ return response.json()
1058
+
1059
+ def update_contracts(self, data: dict) -> dict:
1060
+ """
1061
+ :param data: data to update
1062
+ :return: json response
1063
+ """
1064
+ required_fields = ['employee_id', 'hours', 'id']
1065
+ self.__check_fields(data=data, required_fields=required_fields)
1066
+
1067
+ payload = {
1068
+ "Data": [
1069
+ {
1070
+ "h-aanw": data['hours']
1071
+ }
1072
+ ]
1073
+ }
1074
+
1075
+ if self.debug:
1076
+ print(json.dumps(payload))
1077
+ response = requests.post(url=f"{self.url}mperso/{data['employee_id']}/arbeidsovereenkomsten/{data['id']}",
1078
+ headers=self._get_headers_allsol(),
1079
+ data=json.dumps(payload),
1080
+ timeout=self.timeout)
1081
+ if self.debug:
1082
+ print(response.content)
1083
+ response.raise_for_status()
1084
+
1085
+ return response.json()
1086
+
1087
+ def update_sickleave(self, data):
1088
+ """
1089
+ Update sickleave in all solutions
1090
+ :param data: data to update
1091
+ :return: json response
1092
+ """
1093
+ required_fields = ['employee_code', 'start_date', 'sickleave_id', 'sickleave_code_afas']
1094
+ allowed_fields = {'end_date': "ap46.dat-hervat-arbo"}
1095
+
1096
+ self.__check_fields(data=data, required_fields=required_fields)
1097
+ payload = {
1098
+ "Data": [
1099
+ {
1100
+ "ap46.persnr": data['employee_code'],
1101
+ "ap46.ziektedat": data['start_date'],
1102
+ "ap46.opm": f"Afas koppeling {data['sickleave_code_afas']}"
1103
+ }
1104
+ ]
1105
+ }
1106
+
1107
+ # Add allowed fields to the body
1108
+ for field in (allowed_fields.keys() & data.keys()):
1109
+ payload['Data'][0].update({allowed_fields[field]: data[field]})
1110
+
1111
+ response = requests.put(url=f"{self.url}mzktml/{data['sickleave_id']}",
1112
+ headers=self._get_headers_allsol(),
1113
+ data=json.dumps(payload),
1114
+ timeout=self.timeout)
1115
+ response.raise_for_status()
1116
+ return response
1117
+
1118
+ @staticmethod
1119
+ def __check_fields(data: dict, required_fields: List):
1120
+ for field in required_fields:
1121
+ # Check if the field is present
1122
+ if field not in data:
1123
+ raise ValueError(f'Field {field} is required. Required fields are: {tuple(required_fields)}')
1124
+
1125
+ # Check if the value of the field is None or an empty string
1126
+ if data[field] is None or data[field] == '':
1127
+ raise ValueError(f'Field {field} cannot be empty or None. Required fields are: {tuple(required_fields)}')
1128
+
1129
+
1130
+ def format_dates(df: pd.DataFrame, date_cols: List[str]) -> pd.DataFrame:
1131
+ """
1132
+ Parse the columns in *date_cols* to pandas datetime and format them back
1133
+ to ISO‑8601 strings (YYYY‑MM‑DD). Missing/invalid values become "".
1134
+ The function mutates *df* in‑place and also returns it for convenience.
1135
+ """
1136
+ for col in date_cols:
1137
+ df[col] = pd.to_datetime(df[col], errors="coerce")
1138
+ df[col] = df[col].dt.strftime("%Y-%m-%d").fillna("")
1139
+ return df
1140
+
1141
+
1142
+ def build_unique_key(
1143
+ df: pd.DataFrame, *, id_col: str, date_col: str, key_col: str
1144
+ ) -> pd.DataFrame:
1145
+ """
1146
+ Construct a textual unique key of the form <id>_<YYYY‑MM‑DD>.
1147
+ - *id_col* : column holding a unique employee or entity identifier
1148
+ - *date_col* : column with a (string or datetime) date
1149
+ - *key_col* : name of the column to create/replace
1150
+ Mutates *df* in‑place and returns it.
1151
+ """
1152
+ df[key_col] = df[id_col].astype(str) + "_" + df[date_col].astype(str)
1153
+ return df
1154
+
1155
+
1156
+ # ---------------------------------------------------------------------------
1157
+ # OPTIONAL: duplicate‑partial‑rows logger
1158
+ # ---------------------------------------------------------------------------
1159
+ def log_duplicate_partials(
1160
+ df_partial: pd.DataFrame,
1161
+ write_log: Callable[..., None],
1162
+ subset: str | List[str] = "unique_key_partial",
1163
+ ) -> None:
1164
+ """
1165
+ Detect rows that share the same *subset* key(s) and send a readable
1166
+ message to *write_log* for each duplicate found.
1167
+ Parameters
1168
+ ----------
1169
+ df_partial : DataFrame
1170
+ The partial‑sick‑leave DataFrame.
1171
+ write_log : callable
1172
+ Typically TaskScheduler.write_execution_log or any function that
1173
+ accepts message, data and loglevel keyword arguments.
1174
+ subset : str | list[str]
1175
+ Column(s) that must be unique; defaults to 'unique_key_partial'.
1176
+ """
1177
+ dupes = df_partial[df_partial.duplicated(subset=subset, keep=False)]
1178
+ for _, row in dupes.iterrows():
1179
+ write_log(message=
1180
+ (
1181
+ "Duplicate partial sick‑leave record — "
1182
+ f"employee_id_afas={row.get('employee_id_afas')} "
1183
+ f"employee_code={row.get('employee_code')} "
1184
+ f"key={row.get(subset)}"
1185
+ ), data=None, loglevel="INFO"
1186
+ )
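The sketch below is editorial and not part of the wheel: a minimal end-to-end usage example of the wrapper above. It assumes that BrynQ credentials for the 'all-solutions' system are configured, that the stored base URL ends with a trailing slash (endpoints are concatenated as f"{url}mperso"), that format_dates and build_unique_key are importable module-level helpers as they appear above, and that the column_map field names shown are placeholders rather than a verified schema.

# Minimal usage sketch (assumption-heavy, not shipped with the package).
import os

from brynq_sdk_allsolutions import AllSolutions, build_unique_key, format_dates

# The constructor optionally reads DATA_INTERFACE_ID from the environment and
# pulls the 'all-solutions' credentials (url, client_id, secret_id, username,
# password) from BrynQ, so the BrynQ side must be configured beforehand.
os.environ.setdefault("DATA_INTERFACE_ID", "12345")  # hypothetical value

client = AllSolutions(system_type="source", debug=True)

# Paginated reads; an optional filter string is passed through to the API as
# the "$filter-freeform" query parameter.
employees = client.get_employees()
detailed_sickleave = client.get_detailed_sickleave()

# column_map keys are illustrative All Solutions field names; the values
# become the DataFrame column names that are kept.
column_map = {
    "ab02.persnr": "employee_code",
    "ab02.indat": "date_in_service",
    "name_use_code": "name_use_code",
    "name_use_description": "name_use_description",
}
df_employees = client.extract_employees_allsolutions_dataframe(column_map=column_map)

# Module-level helpers: normalise dates to YYYY-MM-DD strings and build a
# textual key of the form <id>_<date>.
df_employees = format_dates(df_employees, date_cols=["date_in_service"])
df_employees = build_unique_key(
    df_employees, id_col="employee_code", date_col="date_in_service", key_col="unique_key"
)
print(df_employees.head())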
@@ -0,0 +1,17 @@
1
+ Metadata-Version: 2.4
2
+ Name: brynq_sdk_allsolutions
3
+ Version: 2.0.1
4
+ Summary: All solutions wrapper from BrynQ
5
+ Author: BrynQ
6
+ Author-email: support@brynq.com
7
+ License: BrynQ License
8
+ Requires-Dist: brynq-sdk-brynq<5,>=4
9
+ Requires-Dist: pandas<3.0.0,>=2.2.0
10
+ Dynamic: author
11
+ Dynamic: author-email
12
+ Dynamic: description
13
+ Dynamic: license
14
+ Dynamic: requires-dist
15
+ Dynamic: summary
16
+
17
+ All solutions wrapper from BrynQ
@@ -0,0 +1,5 @@
1
+ brynq_sdk_allsolutions/__init__.py,sha256=KUodmfoxSU2TUn46G3FVyM6wysk49CP1N6jVg3OhEQI,46436
2
+ brynq_sdk_allsolutions-2.0.1.dist-info/METADATA,sha256=z2stPxOrLJjj2_fhUZ3ofUqyTEAOcyW8AvWQ5DMwUmg,400
3
+ brynq_sdk_allsolutions-2.0.1.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
4
+ brynq_sdk_allsolutions-2.0.1.dist-info/top_level.txt,sha256=zWGVKG2NBkmXBeeukLmQK8VMAfAnO39fj0FexLq9uBc,23
5
+ brynq_sdk_allsolutions-2.0.1.dist-info/RECORD,,
@@ -0,0 +1,5 @@
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (80.10.2)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
@@ -0,0 +1 @@
1
+ brynq_sdk_allsolutions