brynq-sdk-allsolutions 1.0.1 (tar.gz)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,10 @@
1
+ Metadata-Version: 1.0
2
+ Name: brynq_sdk_allsolutions
3
+ Version: 1.0.1
4
+ Summary: All solutions wrapper from BrynQ
5
+ Home-page: UNKNOWN
6
+ Author: BrynQ
7
+ Author-email: support@brynq.com
8
+ License: BrynQ License
9
+ Description: All solutions wrapper from BrynQ
10
+ Platform: UNKNOWN
@@ -0,0 +1,1162 @@
1
+ import hashlib
2
+ import json
3
+ from typing import Callable, Tuple, Any, Union, List
4
+ import requests
5
+ import pandas as pd
6
+
7
+ from brynq_sdk_brynq import BrynQ
8
+
9
+
10
+ class AllSolutions(BrynQ):
11
+ def __init__(self, interface_id: int, debug: bool = False):
12
+ super().__init__()
13
+ self.token = None
14
+ self.refresh_token = None
15
+ self.debug = debug
16
+ credentials = self.interfaces.credentials.get(interface_id=interface_id, system='all-solutions', system_type=None)
17
+ self.url = credentials['data']['url']
18
+ self.client_id = credentials['data']['client_id']
19
+ self.secret_id = credentials['data']['secret_id']
20
+ self.username = credentials['data']['username']
21
+ self.password = credentials['data']['password']
22
+ self.content_type_header = {'Content-Type': 'application/json'}
23
+ self.filter_freeform_string = "$filter-freeform"
24
+
25
+ # Authentication functions
26
+ def _get_refreshtoken(self):
27
+ signature = hashlib.sha1(f"{self.username}{self.client_id}{self.secret_id}".encode()).hexdigest()
28
+ response = requests.post(url=f"{self.url}login",
29
+ headers=self.content_type_header,
30
+ data=json.dumps({
31
+ "Username": self.username,
32
+ "Signature": signature,
33
+ "Password": self.password,
34
+ "ClientId": self.client_id
35
+ }))
36
+ if self.debug:
37
+ print(response.content)
38
+ response.raise_for_status()
39
+ self.token = response.json()['Token']
40
+ self.refresh_token = response.json()['RefreshToken']
41
+
42
+ def _get_token(self):
43
+ signature = hashlib.sha1(f"{self.refresh_token}{self.secret_id}".encode()).hexdigest()
44
+ response = requests.post(url=f"{self.url}refreshtoken",
45
+ headers=self.content_type_header,
46
+ data=json.dumps({
47
+ "RefreshToken": self.refresh_token,
48
+ "Signature": signature
49
+ }))
50
+ if self.debug:
51
+ print(response.content)
52
+ response.raise_for_status()
53
+ self.token = response.json()['Token']
54
+ self.refresh_token = response.json()['RefreshToken']
55
+
56
+ def _get_headers_allsol(self):
57
+ if self.token is None:
58
+ self._get_refreshtoken()
59
+ else:
60
+ self._get_token()
61
+ headers = {**self.content_type_header, **{'Authorization': f'{self.token}'}}
62
+
63
+ return headers
64
+
65
+ # Get functions
66
+ def get_employees(self, filter: str = None):
67
+ self._get_headers_allsol()
68
+ total_response = []
69
+ more_results = True
70
+ params = {"pageSize": 500}
71
+ params.update({self.filter_freeform_string: filter}) if filter else None
72
+ while more_results:
73
+ response = requests.get(url=f"{self.url}mperso",
74
+ headers=self._get_headers_allsol(),
75
+ params=params)
76
+ if self.debug:
77
+ print(response.content)
78
+ response.raise_for_status()
79
+ more_results = response.json()['Paging']['More']
80
+ params['cursor'] = response.json()['Paging']['NextCursor']
81
+ total_response += response.json()['Data']
82
+
83
+ return total_response
84
+
85
+ def extract_employees_allsolutions_dataframe(self, column_map) -> pd.DataFrame:
86
+ """
87
+ Extract employee data from All Solutions and return it as a DataFrame
88
+ :param column_map: mapping from All Solutions field names to the desired output column names
+ :return: DataFrame with the mapped All Solutions employee data
89
+ """
90
+ resp = self.get_employees()
91
+ df_employees = pd.DataFrame(resp)
92
+ df_employees['name_use_code'] = df_employees['ab02.naamstelling'].apply(lambda x: x.get('id'))
93
+ df_employees['name_use_description'] = df_employees['ab02.naamstelling'].apply(lambda x: x.get('desc'))
94
+ df_employees.rename(mapper=column_map, axis=1, inplace=True, errors='ignore')
95
+ df_employees = df_employees[column_map.values()]
96
+ return df_employees
97
+
98
+ def get_sickleave(self, filter: str = None):
99
+ self._get_headers_allsol()
100
+ total_response = []
101
+ more_results = True
102
+ params = {"pageSize": 500}
103
+ if filter:
104
+ params.update({self.filter_freeform_string: filter})
105
+
106
+ while more_results:
107
+ response = requests.get(url=f"{self.url}mzktml",
108
+ headers=self._get_headers_allsol(),
109
+ params=params)
110
+ if self.debug:
111
+ print(response.content)
112
+ response.raise_for_status()
113
+ data = response.json()
114
+ more_results = data['Paging']['More']
115
+ params['cursor'] = data['Paging']['NextCursor']
116
+ total_response += data['Data']
117
+
118
+ # Now make an additional call to retrieve the partial absence percentage
119
+ for entry in total_response:
120
+ sickleave_id = entry.get('Id')
121
+ if sickleave_id:
122
+ partial_response = requests.get(
123
+ url=f"{self.url}mzktml/{sickleave_id}/partieelverzuim",
124
+ headers=self._get_headers_allsol()
125
+ )
126
+ partial_response.raise_for_status()
127
+ partial_data = partial_response.json().get('Data', [])
128
+ if partial_data:
129
+ entry['percentage'] = partial_data[0].get('ap47.prc', None)
130
+
131
+
132
+ return total_response
133
+
134
+ def get_detailed_sickleave(self, filter: str = None):
135
+ self._get_headers_allsol()
136
+ total_response = []
137
+ more_results = True
138
+ params = {"pageSize": 500}
139
+ if filter:
140
+ params.update({self.filter_freeform_string: filter})
141
+
142
+ while more_results:
143
+ response = requests.get(
144
+ url=f"{self.url}mzktml",
145
+ headers=self._get_headers_allsol(),
146
+ params=params
147
+ )
148
+ if self.debug:
149
+ print(response.content)
150
+ response.raise_for_status()
151
+ data = response.json()
152
+ more_results = data['Paging']['More']
153
+ params['cursor'] = data['Paging']['NextCursor']
154
+ total_response += data['Data']
155
+
156
+ detailed_response = []
157
+
158
+ # Iterate over each sick leave entry
159
+ for entry in total_response:
160
+ sickleave_id = entry.get('Id')
161
+ employee_code = entry.get('ap46.persnr') # Adjust the key as per actual data
162
+ search_name = entry.get('ab02.zoeknaam') # Adjust the key as per actual data
163
+ sickleave_start_date = entry.get('ap46.ziektedat')
164
+ sickleave_end_date = entry.get('ap46.dat-hervat-arbo')
165
+
166
+
167
+ if sickleave_id:
168
+ partial_response = requests.get(
169
+ url=f"{self.url}mzktml/{sickleave_id}/partieelverzuim",
170
+ headers=self._get_headers_allsol()
171
+ )
172
+ partial_response.raise_for_status()
173
+ partial_data = partial_response.json().get('Data', [])
174
+
175
+ # Iterate over each partial sick leave entry
176
+ for partial_entry in partial_data:
177
+ partial_sickleave_id = partial_entry.get('Id')
178
+ partial_start_date = partial_entry.get('ap47.ingangsdat')
179
+ partial_end_date = partial_entry.get('h-einddat')
180
+ percentage = partial_entry.get('ap47.prc')
181
+
182
+ detailed_response.append({
183
+ 'search_name': search_name,
184
+ 'employee_code': employee_code,
185
+ 'sickleave_id': sickleave_id,
186
+ 'start_date': sickleave_start_date,
187
+ 'end_date': sickleave_end_date,
188
+ 'partial_sickleave_id': partial_sickleave_id,
189
+ 'partial_start_date': partial_start_date,
190
+ 'partial_end_date': partial_end_date,
191
+ 'percentage': percentage
192
+ })
193
+
194
+ return detailed_response
195
+
196
+ def get_persons(self, filter: str = None):
197
+ total_response = []
198
+ more_results = True
199
+ params = {"pageSize": 500}
200
+ params.update({self.filter_freeform_string: filter}) if filter else None
201
+ while more_results:
202
+ response = requests.get(url=f"{self.url}mrlprs",
203
+ headers=self._get_headers_allsol(),
204
+ params=params)
205
+ if self.debug:
206
+ print(response.content)
207
+ response.raise_for_status()
208
+ more_results = response.json()['Paging']['More']
209
+ params['cursor'] = response.json()['Paging']['NextCursor']
210
+ total_response += response.json()['Data']
211
+
212
+ return total_response
213
+
214
+ def get_contracts(self, filter: str = None):
215
+ total_response = []
216
+ more_results = True
217
+ params = {"pageSize": 500}
218
+ if filter:
219
+ params.update({self.filter_freeform_string: filter})
220
+
221
+ while more_results:
222
+ response = requests.get(url=f"{self.url}mappar",  # no leading slash; self.url already ends with a slash, as in the other endpoints
223
+ headers=self._get_headers_allsol(),
224
+ params=params)
225
+ if self.debug:
226
+ print(response.content)
227
+
228
+ response.raise_for_status()
229
+
230
+ response_data = response.json()
231
+ more_results = response_data.get('Paging', {}).get('More', False)
232
+ next_cursor = response_data.get('Paging', {}).get('NextCursor')
233
+
234
+ if next_cursor:
235
+ params['cursor'] = next_cursor
236
+ else:
237
+ more_results = False
238
+
239
+ total_response += response_data.get('Data', [])
240
+
241
+ return total_response
242
+
243
+ def get_contract(self, employee_id: str, filter: str = None):
244
+ total_response = []
245
+ more_results = True
246
+ params = {"pageSize": 500}
247
+ params.update({self.filter_freeform_string: filter}) if filter else None
248
+ while more_results:
249
+ response = requests.get(url=f"{self.url}mperso/{employee_id}/arbeidsovereenkomsten",
250
+ headers=self._get_headers_allsol(),
251
+ params=params)
252
+ if self.debug:
253
+ print(response.content)
254
+ response.raise_for_status()
255
+ more_results = response.json()['Paging']['More']
256
+ params['cursor'] = response.json()['Paging']['NextCursor']
257
+ total_response += response.json()['Data']
258
+
259
+ return total_response
260
+
261
+ def get_hours(self, employee_id: str, filter: str = None):
262
+ total_response = []
263
+ more_results = True
264
+ params = {"pageSize": 500}
265
+ params.update({self.filter_freeform_string: filter}) if filter else None
266
+ while more_results:
267
+ response = requests.get(url=f"{self.url}mperso/{employee_id}/werktijden2wk",
268
+ headers=self._get_headers_allsol(),
269
+ params=params)
270
+ response.raise_for_status()
271
+ more_results = response.json()['Paging']['More']
272
+ params['cursor'] = response.json()['Paging']['NextCursor']
273
+ total_response += response.json()['Data']
274
+
275
+ return total_response
276
+
277
+ def get_managers(self, employee_id: str, filter: str = None):
278
+ total_response = []
279
+ more_results = True
280
+ params = {"pageSize": 500}
281
+ params.update({self.filter_freeform_string: filter}) if filter else None
282
+ while more_results:
283
+ response = requests.get(url=f"{self.url}mperso/{employee_id}/manager",
284
+ headers=self._get_headers_allsol(),
285
+ params=params)
286
+ response.raise_for_status()
287
+ more_results = response.json()['Paging']['More']
288
+ params['cursor'] = response.json()['Paging']['NextCursor']
289
+ total_response += response.json()['Data']
290
+
291
+ return total_response
292
+
293
+ def get_functions(self, employee_id: str, filter: str = None):
294
+ total_response = []
295
+ more_results = True
296
+ params = {"pageSize": 500}
297
+ params.update({self.filter_freeform_string: filter}) if filter else None
298
+ while more_results:
299
+ response = requests.get(url=f"{self.url}mperso/{employee_id}/functies",
300
+ headers=self._get_headers_allsol(),
301
+ params=params)
302
+ response.raise_for_status()
303
+ more_results = response.json()['Paging']['More']
304
+ params['cursor'] = response.json()['Paging']['NextCursor']
305
+ total_response += response.json()['Data']
306
+
307
+ return total_response
308
+
309
+ def get_costcenters(self, employee_id: str, filter: str = None):
310
+ total_response = []
311
+ more_results = True
312
+ params = {"pageSize": 500}
313
+ params.update({self.filter_freeform_string: filter}) if filter else None
314
+ while more_results:
315
+ headers = self._get_headers_allsol()
316
+ if self.debug: print(headers)
317
+ response = requests.get(url=f"{self.url}mperso/{employee_id}/thuisafdelingen",
318
+ headers=headers,
319
+ params=params)
320
+ response.raise_for_status()
321
+ more_results = response.json()['Paging']['More']
322
+ params['cursor'] = response.json()['Paging']['NextCursor']
323
+ total_response += response.json()['Data']
324
+
325
+ return total_response
326
+
327
+ # Post functions
328
+ def create_employee(self, data: dict) -> json:
329
+ """
330
+ Create a new employee in All Solutions
331
+ :param data: all the fields that are required to create a new employee
332
+ :return: response json
333
+ """
334
+ required_fields = ["employee_code", "employee_id_afas", "date_in_service", "email_work", "costcenter", "search_name", "function", "person_id", "hours_week", "employment", 'parttime_factor']
335
+ allowed_fields = {
336
+ "note": "ab02.notitie-edit",
337
+ "birth_date": "ab02.geb-dat",
338
+ "email_private": "ab02.email",
339
+ 'employment': "ab02.srt-mdw",
340
+ "phone_work": "ab02.telefoon-int",
341
+ "mobile_phone_work": "ab02.mobiel-int",
342
+ "contract_end_date": "ab02.einddat-contract",
343
+ "nickname": "ab02.roepnaam",
344
+ "costcenter": "ab02.ba-kd",
345
+ "function": "ab02.funktie",
346
+ "manager_employee_code": "ab02.manager",
347
+ "name_use": "ab02.naamstelling",
348
+ "parttime_factor": "h-dt-factor-afas"
349
+ }
350
+ self.__check_fields(data=data, required_fields=required_fields)
351
+
352
+ payload = {
353
+ "Data": [
354
+ {
355
+ "ab02.persnr": data['employee_code'],
356
+ "ab02.kenmerk[113]": data['employee_id_afas'],
357
+ "ab02.zoeknaam": data['search_name'],
358
+ "ab02.indat": data['date_in_service'],
359
+ "ab02.email-int": data['email_work'],
360
+ "ab02.ba-kd": data['costcenter'],
361
+ "ab02.funktie": data['function'],
362
+ "ab02.srt-mdw": data["employment"],
363
+ "h-aanw":data['hours_week'],
364
+ "h-aanw2": data['hours_week'],
365
+ "h-default7": True,
366
+ "ab02.contr-srt-kd": "1",
367
+ "ab02.notitie-edit": "Afas koppeling"
368
+ }
369
+ ]
370
+ }
371
+ if 'contract_end_date' in data:
372
+ #also add "ab02.einddat-proef" as the same date
373
+ payload['Data'][0].update({"ab02.uitdat": data['contract_end_date']})
374
+ # Add allowed fields to the body
375
+ for field in (allowed_fields.keys() & data.keys()):
376
+ payload['Data'][0].update({allowed_fields[field]: data[field]})
377
+
378
+ url = f'{self.url}mrlprs/{data["person_id"]}/medewerkergegevens'
380
+
381
+ response = requests.post(url=url,
382
+ headers=self._get_headers_allsol(),
383
+ data=json.dumps(payload))
384
+ if self.debug:
385
+ print(response.content)
386
+ print(payload)
387
+ response.raise_for_status()
388
+
389
+ return response
390
+
391
+ def create_person(self, data: dict) -> json:
392
+ """
393
+ Create a new person in All Solutions
394
+ :param data: data of the person
395
+ :return: response json
396
+ """
397
+ required_fields = ["search_name", "employee_id_afas", "employee_code", "birth_date", "initials", "city", "lastname",
398
+ "street", "housenumber", "postal_code"]
399
+ allowed_fields = {
400
+ "note": "ma01.notitie-edit",
401
+ "prefix": "ma01.voor[1]",
402
+ 'firstname': "ma01.voornaam",
403
+ 'gender': "ma01.geslacht",
404
+ # "mobile_phone_private": "ma01.mobiel",
405
+ # "email_private": "ma01.email",
406
+ # "phone_private": "ma01.telefoon",
407
+ "prefix_birthname": "ma01.voor[2]",
408
+ "housenumber_addition": "ma01.b-appendix",
409
+ "birthname": "ma01.persoon[2]",
410
+ }
411
+ self.__check_fields(data=data, required_fields=required_fields)
412
+
413
+ payload = {
414
+ "Data": [
415
+ {
416
+ "ma01.zoeknaam": data['search_name'],
417
+ 'ma01.kenmerk[43]': data['employee_id_afas'],
418
+ "ma01.persnr": data['employee_code'],
419
+ "ma01.geb-dat": data['birth_date'],
420
+ "ma01.voorl": data['initials'],
421
+ "ma01.roepnaam": data['nickname'],
422
+ "ma01.b-wpl": data['city'],
423
+ "ma01.persoon[1]": data['lastname'],
424
+ "ma01.b-adres": data['street'],
425
+ "ma01.b-num": data['housenumber'],
426
+ "ma01.b-pttkd": data['postal_code'],
427
+ "h-default6": True,
428
+ "h-default8": True,
429
+ "ma01.rel-grp": 'Medr',
430
+ "h-chk-ma01": False # Check if person already exists
431
+ }
432
+ ]
433
+ }
434
+
435
+ # Add allowed fields to the body
436
+ for field in (allowed_fields.keys() & data.keys()):
437
+ payload['Data'][0].update({allowed_fields[field]: data[field]})
438
+
439
+ response = requests.post(url=f"{self.url}mrlprs",
440
+ headers=self._get_headers_allsol(),
441
+ data=json.dumps(payload))
442
+ # if self.debug:
443
+ # print("______________________payload______________________")
444
+ # print(payload)
445
+ # print("______________________response______________________")
446
+ # print(response.content)
447
+ response.raise_for_status()
448
+
449
+
450
+ return response
451
+
452
+ def create_timetable(self, data: dict) -> json:
453
+ """
454
+ Create a new timetable (working hours) entry in All Solutions
455
+ :param data: data to update
456
+ :return: json response
457
+ """
458
+ required_fields = ['employee_id', 'parttime_factor', 'start_date', 'hours_per_week']
459
+
460
+ self.__check_fields(data=data, required_fields=required_fields)
461
+ rounded_parttime_factor = round(data['parttime_factor'], 4)
462
+ # hours_per_week = 38 * rounded_partime_factor
463
+ # make sure the hours per week are rounded to 2 decimals
464
+ # hours_per_week = round(hours_per_week, 2)
465
+
466
+ payload = {
467
+ "Data": [
468
+ {
469
+ "ap23.datum": data['start_date'],
470
+ "h-aanw": data['hours_per_week'],
471
+ "h-default1": True,
472
+ "h-aanw2": data['hours_per_week'],
473
+ "h-default2": True,
474
+ "ap23.dt-factor-afas": rounded_partime_factor,
475
+ "h-dt-factor-2wk": rounded_partime_factor
476
+
477
+ }
478
+ ]
479
+ }
480
+ if self.debug:
481
+ print('new timetable')
482
+ print(payload)
483
+ print(data.get('employee_id_afas'))
484
+ response = requests.post(url=f"{self.url}mperso/{data['employee_id']}/werktijden2wk",
485
+ headers=self._get_headers_allsol(),
486
+ data=json.dumps(payload))
487
+ response.raise_for_status()
488
+ return response
489
+
490
+ def create_contract(self, data: dict) -> json:
491
+ """
492
+ Create a new employment contract in All Solutions
493
+ :param data: data to update
494
+ :return: json response
495
+ """
496
+ required_fields = ['employee_id', 'tracking_number', 'costcenter', 'function', 'hours_per_week', 'parttime_factor']
497
+ allowed_fields = {
498
+ "contract_start_date": "ap11.indat",
499
+ "contract_end_date": "ap11.einddat-contract",
500
+ "employee_type": "ab02.srt-mdw",
501
+ "employment": "ab02.srt-mdw"
502
+ }
503
+
504
+ self.__check_fields(data=data, required_fields=required_fields)
505
+
506
+ payload = {
507
+ "Data": [
508
+ {
509
+ "ap11.vlgnr": data['tracking_number'],
510
+ "ab02.ba-kd": data['costcenter'],
511
+ "ab02.funktie": data['function'],
512
+ "h-aanw": data['hours_per_week'],
513
+ "h-aanw2": data['hours_per_week'],
514
+ "h-default7": True,
515
+ "h-dt-factor-afas": data['parttime_factor']
516
+ }
517
+ ]
518
+ }
519
+ # Set "ap11.uitdat" when a contract end date is supplied
520
+ if 'contract_end_date' in data:
521
+ payload['Data'][0].update({"ap11.uitdat": data['contract_end_date']})
522
+
523
+ # Add allowed fields to the body
524
+ for field in (allowed_fields.keys() & data.keys()):
525
+ payload['Data'][0].update({allowed_fields[field]: data[field]})
526
+
527
+ response = requests.post(url=f"{self.url}mperso/{data['employee_id']}/arbeidsovereenkomsten",
528
+ headers=self._get_headers_allsol(),
529
+ data=json.dumps(payload))
530
+ if self.debug:
531
+ print(response.content)
532
+ print(payload)
533
+ response.raise_for_status()
534
+
535
+ return response
536
+
537
+ def create_costcenter(self, data: dict) -> json:
538
+ """
539
+ Create a new cost center (thuisafdeling) assignment in All Solutions
540
+ :param data: data to update
541
+ :return: json response
542
+ """
543
+ required_fields = ['employee_id', 'start_year', 'start_week', 'costcenter']
544
+ self.__check_fields(data=data, required_fields=required_fields)
545
+
546
+ payload = {
547
+ "Data": [
548
+ {
549
+ "ab09.jaar": data['start_year'],
550
+ "ab09.periode": data['start_week'],
551
+ "ab09.ba-kd": data['costcenter']
552
+ }
553
+ ]
554
+ }
555
+
556
+ response = requests.post(url=f"{self.url}mperso/{data['employee_id']}/thuisafdelingen",
557
+ headers=self._get_headers_allsol(),
558
+ data=json.dumps(payload))
559
+ if self.debug:
560
+ print(response.content)
561
+ print(payload)
562
+ response.raise_for_status()
563
+
564
+ return response
565
+
566
+ def create_function(self, data: dict) -> json:
567
+ """
568
+ Create a new function assignment in All Solutions
569
+ :param data: data to update
570
+ :return: json response
571
+ """
572
+ required_fields = ['employee_id', 'start_year', 'start_week', 'function']
573
+ self.__check_fields(data=data, required_fields=required_fields)
574
+
575
+ allowed_fields = {
576
+ "end_year": "ab13.tot-jaar",
577
+ "end_week": "ab13.tot-week"
578
+ }
579
+
580
+ payload = {
581
+ "Data": [
582
+ {
583
+ "ab13.jaar": data['start_year'],
584
+ "ab13.week": data['start_week'],
585
+ "ab13.funktie": data['function']
586
+ }
587
+ ]
588
+ }
589
+
590
+ # Add allowed fields to the body
591
+ for field in (allowed_fields.keys() & data.keys()):
592
+ payload['Data'][0].update({allowed_fields[field]: data[field]})
593
+
594
+
595
+
596
+ response = requests.post(url=f"{self.url}mperso/{data['employee_id']}/functies",
597
+ headers=self._get_headers_allsol(),
598
+ data=json.dumps(payload))
599
+ if self.debug:
600
+ print(response.content)
601
+ print(payload)
602
+ response.raise_for_status()
603
+
604
+ return response
605
+
606
+ def create_sickleave(self, data):
607
+ """
608
+ Create a new sick leave entry in All Solutions
609
+ :param data: data to update
610
+ :return: json response
611
+ """
612
+ required_fields = ['employee_code', 'start_date', 'activity_code', 'sickleave_code_afas']
613
+
614
+ allowed_fields = {'end_date': "ap46.dat-hervat-arbo"}
615
+
616
+ self.__check_fields(data=data, required_fields=required_fields)
617
+ payload = {
618
+ "Data": [
619
+ {
620
+ "ap46.persnr": data['employee_code'],
621
+ "ap46.aktkd": data['activity_code'],
622
+ "ap46.ziektedat": data['start_date'],
623
+ # "ap46.dat-meld-arbo": data['start_date'],
624
+ "ap46.opm": f"Afas koppeling {data['sickleave_code_afas']}"
625
+
626
+
627
+ }
628
+ ]
629
+ }
630
+ # Add allowed fields to the body
631
+ for field in (allowed_fields.keys() & data.keys()):
632
+ payload['Data'][0].update({allowed_fields[field]: data[field]})
633
+ url = f"{self.url}mzktml"
634
+
635
+ if self.debug:
636
+ print('sickleave')
637
+ print(url)
638
+ print(payload)
639
+
640
+ response = requests.post(url=url,
641
+ headers=self._get_headers_allsol(),
642
+ data=json.dumps(payload))
643
+ return response
644
+
645
+ def create_partial_sickleave(self, data):
646
+ """
647
+ Create a partial sick leave entry in All Solutions.
648
+ :param data: Data to upload in the request body (the entire row as a dictionary)
649
+ :return: JSON response
650
+ """
651
+ # Required fields that must be present in the entry
652
+ required_fields = ['partieel_verzuim_start_datum', 'percentage', 'sickleave_id']
653
+
654
+ # Check required fields in the data
655
+ self.__check_fields(data=data, required_fields=required_fields)
656
+
657
+ # Map your data fields to the API's expected field names
658
+ api_field_mapping = {
659
+ 'partieel_verzuim_start_datum': 'ap47.ingangsdat',
660
+ 'percentage': 'ap47.prc',
661
+ 'sickleave_code_afas': 'ap47.opm'
662
+ }
663
+
664
+ # Construct the payload for the entry
665
+ payload_entry = {}
666
+ for field, api_field in api_field_mapping.items():
667
+ if field in data and pd.notna(data[field]) and data[field] != '':
668
+ payload_entry[api_field] = data[field]
669
+
670
+ payload = {
671
+ "Data": [payload_entry]
672
+ }
673
+
674
+ # Construct the URL using the sickleave_id from data
675
+ sickleave_id = data['sickleave_id']
676
+ url = f"/mzktml/{sickleave_id}/partieelverzuim"
677
+
678
+ # Make the POST request to the given URL
679
+ response = requests.post(
680
+ url=f"{self.url}{url}",
681
+ headers=self._get_headers_allsol(),
682
+ data=json.dumps(payload)
683
+ )
684
+ # Return the Response object
685
+ return response
686
+
687
+ def update_partial_sickleave(self, data):
688
+ """
689
+ Update an existing partial sick leave entry in All Solutions.
690
+ :param data: Data to upload in the request body (the entire row as a dictionary)
691
+ :return: JSON response
692
+ """
693
+ # Required fields that must be present in the entry
694
+ required_fields = ['partieel_verzuim_start_datum', 'percentage', 'sickleave_id', 'partial_sickleave_id']
695
+
696
+ # Check required fields in the data
697
+ self.__check_fields(data=data, required_fields=required_fields)
698
+
699
+ # Map your data fields to the API's expected field names
700
+ api_field_mapping = {
701
+ 'partieel_verzuim_start_datum': 'ap47.ingangsdat',
702
+ 'percentage': 'ap47.prc',
703
+ 'remarks': 'ap47.opm'
704
+ }
705
+
706
+ # Construct the payload for the entry
707
+ payload_entry = {}
708
+ for field, api_field in api_field_mapping.items():
709
+ if field in data and pd.notna(data[field]) and data[field] != '':
710
+ payload_entry[api_field] = data[field]
711
+
712
+ payload = {
713
+ "Data": [payload_entry]
714
+ }
715
+
716
+ # Construct the URL using the sickleave_id from data
717
+ url = f"/mzktml/{data['sickleave_id']}/partieelverzuim/{data['partial_sickleave_id']}"
718
+
719
+ if self.debug:
720
+ print('partial sickleave')
721
+ print(url)
722
+ print(payload)
723
+
724
+ # Make the PUT request to the given URL
725
+ response = requests.put(
726
+ url=f"{self.url}{url}",
727
+ headers=self._get_headers_allsol(),
728
+ data=json.dumps(payload)
729
+ )
730
+ # Return the Response object
731
+ return response
732
+
733
+ def create_manager(self, data: dict) -> json:
734
+ """
735
+ Create a manager assignment in All Solutions
+ :param data: data to update
736
+ :return: json response
737
+ """
738
+ required_fields = ['employee_id', 'manager_employee_code', 'year', 'week']
739
+ allowed_fields = {"year_to": "ap15.tot-jaar", "week_to": "ap15.tot-week"}
740
+ self.__check_fields(data=data, required_fields=required_fields)
741
+
742
+ payload = {"Data": [{"ap15.jaar": data['year'], "ap15.week": data['week'], "ap15.manager": data['manager_employee_code']}]}
743
+
744
+ # Add allowed fields to the body
745
+ for field in (allowed_fields.keys() & data.keys()):
746
+ payload['Data'][0].update({allowed_fields[field]: data[field]})
747
+
748
+ response = requests.post(url=f"{self.url}mperso/{data['employee_id']}/manager", headers=self._get_headers_allsol(), data=json.dumps(payload))
749
+ if self.debug:
750
+ print(response.content)
751
+ print(payload)
752
+ response.raise_for_status()
753
+
754
+ return response
755
+
756
+ # Put functions
757
+ def update_timetable(self, data: dict) -> json:
758
+ """
759
+ Update an existing timetable (working hours) entry in All Solutions
760
+ :param data: data to update
761
+ :return: json response
762
+ """
763
+ required_fields = ['employee_id', 'hours_per_week', 'start_date', 'timetable_id', 'parttime_factor']
764
+
765
+ rounded_parttime_factor = round(data['parttime_factor'], 4)
766
+
767
+ self.__check_fields(data=data, required_fields=required_fields)
768
+ payload = {
769
+ "Data": [
770
+ {
771
+ "ap23.datum": data['start_date'],
772
+ "h-aanw": data['hours_per_week'],
773
+ "h-default1": True,
774
+ "h-aanw2": data['hours_per_week'],
775
+ "h-default2": True,
776
+ "ap23.dt-factor-afas": rounded_partime_factor
777
+
778
+ }
779
+ ]
780
+ }
781
+ if self.debug:
782
+ print('edit')
783
+ print(payload)
784
+ print(data.get('employee_id_afas'))
785
+ response = requests.put(url=f"{self.url}mperso/{data['employee_id']}/werktijden2wk/{data['timetable_id']}",
786
+ headers=self._get_headers_allsol(),
787
+ data=json.dumps(payload))
788
+ response.raise_for_status()
789
+ return response
790
+
791
+ def update_contract(self, data: dict) -> json:
792
+
793
+ """
794
+ Update an existing employment contract in All Solutions
795
+ :param data: data to update
796
+ :return: json response
797
+ """
798
+ required_fields = ['employee_id', 'contract_id', 'tracking_number']
799
+ allowed_fields = {
800
+ "contract_end_date": "ap11.einddat-contract"
801
+ }
802
+
803
+ self.__check_fields(data=data, required_fields=required_fields)
804
+
805
+ payload = {
806
+ "Data": [
807
+ {
808
+ "ap11.vlgnr": data['tracking_number']
809
+ }
810
+ ]
811
+ }
812
+
813
+ # Add allowed fields to the body
814
+ for field in (allowed_fields.keys() & data.keys()):
815
+ payload['Data'][0].update({allowed_fields[field]: data[field]})
816
+
817
+ response = requests.put(url=f"{self.url}mperso/{data['employee_id']}/arbeidsovereenkomsten/{data['contract_id']}",
818
+ headers=self._get_headers_allsol(),
819
+ data=json.dumps(payload))
820
+ if self.debug:
821
+ print(response.content)
822
+ print(payload)
823
+ response.raise_for_status()
824
+
825
+ return response
826
+
827
+ def update_employee(self, data: dict) -> json:
828
+ """
829
+ Update an existing employee in All Solutions
830
+ :param data: data to update
831
+ :return:
832
+ """
833
+ required_fields = ['employee_id']
834
+ allowed_fields = {
835
+ 'employee_code': 'ab02.persnr',
836
+ 'birth_date': 'ab02.geb-dat',
837
+ 'employee_id_afas': "ab02.kenmerk[113]",
838
+ 'date_in_service': 'ab02.indat',
839
+ 'date_in_service_custom': 'ab02.kenmerk[62]',
840
+ 'termination_date': 'ab02.uitdat',
841
+ 'email_work': 'ab02.email-int',
842
+ 'email_private': 'ab02.email',
843
+ 'phone_work': 'ab02.telefoon-int',
844
+ 'mobile_phone_work': 'ab02.mobiel-int',
845
+ 'note': "ab02.notitie-edit",
846
+ 'employment': "ab02.srt-mdw",
847
+ "nickname": "ab02.roepnaam",
848
+ "name_use": "ab02.naamstelling"
849
+ }
850
+
851
+ self.__check_fields(data=data, required_fields=required_fields)
852
+
853
+ payload = {
854
+ "Data": [
855
+ {
856
+ "h-default7": True,
857
+ "h-default6": True, # Find corresponding employee details
858
+ "h-default5": True, # Find name automatically
859
+ "h-corr-adres": True, # save address as correspondence address
860
+ "ab02.contr-srt-kd": "1"
861
+ }
862
+ ]
863
+ }
864
+
865
+ # Add allowed fields to the body
866
+ for field in (allowed_fields.keys() & data.keys()):
867
+ payload['Data'][0].update({allowed_fields[field]: data[field]})
868
+
869
+ response = requests.put(url=f"{self.url}mperso/{data['employee_id']}",
870
+ headers=self._get_headers_allsol(),
871
+ data=json.dumps(payload))
872
+ if self.debug:
873
+ print(response.content)
874
+ print(payload)
875
+ response.raise_for_status()
876
+
877
+ return response
878
+
879
+ def update_person(self, data: dict) -> json:
880
+ """
881
+ Update person in all solutions
882
+ :param data: data to update
883
+ :return: json response
884
+ """
885
+ required_fields = ['person_id']
886
+ allowed_fields = {
887
+ "search_name": "ma01.zoeknaam",
888
+ "employee_id_afas": "ma01.mail-nr",
889
+ "employee_code": "ma01.persnr",
890
+ "birth_date": "ma01.geb-dat",
891
+ "initials": "ma01.voorl",
892
+ "firstname": "ma01.voornaam",
893
+ "nickname": "ma01.roepnaam",
894
+ "prefix": "ma01.voor[1]",
895
+ "prefix_partner": "ma01.voor[2]",
896
+ "city": "ma01.b-wpl",
897
+ "birth_name": "ma01.persoon[1]",
898
+ "lastname_partner": "ma01.persoon[2]",
899
+ "street": "ma01.b-adres",
900
+ "housenumber": "ma01.b-num",
901
+ "housenumber_addition": "ma01.b-appendix",
902
+ "postal_code": "ma01.b-pttkd",
903
+ "note": "ma01.notitie-edit",
904
+ 'gender': "ma01.geslacht"
905
+ }
906
+
907
+ self.__check_fields(data=data, required_fields=required_fields)
908
+
909
+ payload = {
910
+ "Data": [
911
+ {
912
+ "h-default6": True,
913
+ "h-default8": True,
914
+ "ma01.rel-grp": 'Medr'
915
+ }
916
+ ]
917
+ }
918
+
919
+ # Add allowed fields to the body
920
+ for field in (allowed_fields.keys() & data.keys()):
921
+ payload['Data'][0].update({allowed_fields[field]: data[field]})
922
+
923
+ response = requests.put(url=f"{self.url}mrlprs/{data['person_id']}",
924
+ headers=self._get_headers_allsol(),
925
+ data=json.dumps(payload))
926
+ if self.debug:
927
+ print(response.content)
928
+ print(payload)
929
+
930
+ response.raise_for_status()
931
+
932
+ return response
933
+
934
+ def update_costcenter(self, data: dict) -> json:
935
+ """
936
+ Update an existing cost center assignment in All Solutions
937
+ :param data: data to update
938
+ :return: json response
939
+ """
940
+ required_fields = ['employee_id', 'start_year', 'start_week', 'costcenter', 'costcenter_id']
941
+ self.__check_fields(data=data, required_fields=required_fields)
942
+
943
+ allowed_fields = {
944
+ "end_year": "ab09.tot-jaar",
945
+ "end_week": "ab09.tot-per"
946
+ }
947
+
948
+ payload = {
949
+ "Data": [
950
+ {
951
+ "ab09.jaar": data['start_year'],
952
+ "ab09.periode": data['start_week'],
953
+ "ab09.ba-kd": data['costcenter']
954
+ }
955
+ ]
956
+ }
957
+ # Add allowed fields to the body
958
+ for field in (allowed_fields.keys() & data.keys()):
959
+ payload['Data'][0].update({allowed_fields[field]: data[field]})
960
+
961
+ response = requests.put(url=f"{self.url}mperso/{data['employee_id']}/thuisafdelingen/{data['costcenter_id']}",
962
+ headers=self._get_headers_allsol(),
963
+ data=json.dumps(payload))
964
+ if self.debug:
965
+ print(response.content)
966
+ print(payload)
967
+ response.raise_for_status()
968
+
969
+ return response
970
+
971
+ def update_function(self, data: dict) -> json:
972
+ """
973
+ Update an existing function assignment in All Solutions
974
+ :param data: data to update
975
+ :return: json response
976
+ """
977
+ required_fields = ['employee_id', 'start_year', 'start_week', 'function', 'function_id']
978
+ self.__check_fields(data=data, required_fields=required_fields)
979
+
980
+ allowed_fields = {
981
+ "end_year": "ab13.tot-jaar",
982
+ "end_week": "ab13.tot-week"
983
+ }
984
+ payload = {
985
+ "Data": [
986
+ {
987
+ "ab13.jaar": data['start_year'],
988
+ "ab13.week": data['start_week'],
989
+ "ab13.funktie": data['function']
990
+ }
991
+ ]
992
+ }
993
+
994
+ # Add allowed fields to the body
995
+ for field in (allowed_fields.keys() & data.keys()):
996
+ payload['Data'][0].update({allowed_fields[field]: data[field]})
997
+
998
+
999
+
1000
+ response = requests.put(url=f"{self.url}mperso/{data['employee_id']}/functies/{data['function_id']}",
1001
+ headers=self._get_headers_allsol(),
1002
+ data=json.dumps(payload))
1003
+ if self.debug:
1004
+ print(response.content)
1005
+ print(payload)
1006
+ response.raise_for_status()
1007
+
1008
+ return response
1009
+
1010
+ def update_worked_hours(self, data: dict) -> json:
1011
+ """
1012
+ Register temporary working hours (Tijdelijkewerktijden) for an employee in All Solutions
+ :param data: data to update
1013
+ :return: json response
1014
+ """
1015
+ required_fields = ['employee_id', 'id', 'hours']
1016
+ self.__check_fields(data=data, required_fields=required_fields)
1017
+
1018
+ payload = {
1019
+ "Data": [
1020
+ {
1021
+ "h-aanw": data['hours']
1022
+ }
1023
+ ]
1024
+ }
1025
+
1026
+ if self.debug:
1027
+ print(json.dumps(payload))
1028
+ response = requests.post(url=f"{self.url}mperso/{data['employee_id']}/Tijdelijkewerktijden",
1029
+ headers=self._get_headers_allsol(),
1030
+ data=json.dumps(payload))
1031
+ if self.debug:
1032
+ print(response.content)
1033
+ response.raise_for_status()
1034
+
1035
+ return response.json()
1036
+
1037
+ def update_contracts(self, data: dict) -> json:
1038
+ """
1039
+ Update the hours on an existing employment contract in All Solutions
+ :param data: data to update
1040
+ :return: json response
1041
+ """
1042
+ required_fields = ['employee_id', 'hours', 'id']
1043
+ self.__check_fields(data=data, required_fields=required_fields)
1044
+
1045
+ payload = {
1046
+ "Data": [
1047
+ {
1048
+ "h-aanw": data['hours']
1049
+ }
1050
+ ]
1051
+ }
1052
+
1053
+ if self.debug:
1054
+ print(json.dumps(payload))
1055
+ response = requests.post(url=f"{self.url}mperso/{data['employee_id']}/arbeidsovereenkomsten/{data['id']}",
1056
+ headers=self._get_headers_allsol(),
1057
+ data=json.dumps(payload))
1058
+ if self.debug:
1059
+ print(response.content)
1060
+ response.raise_for_status()
1061
+
1062
+ return response.json()
1063
+
1064
+ def update_sickleave(self, data):
1065
+ """
1066
+ Update sickleave in all solutions
1067
+ :param data: data to update
1068
+ :return: json response
1069
+ """
1070
+ required_fields = ['employee_code', 'start_date', 'sickleave_id', 'sickleave_code_afas']
1071
+ allowed_fields = {'end_date': "ap46.dat-hervat-arbo"}
1072
+
1073
+ self.__check_fields(data=data, required_fields=required_fields)
1074
+ payload = {
1075
+ "Data": [
1076
+ {
1077
+ "ap46.persnr": data['employee_code'],
1078
+ "ap46.ziektedat": data['start_date'],
1079
+ "ap46.opm": f"Afas koppeling {data['sickleave_code_afas']}"
1080
+ }
1081
+ ]
1082
+ }
1083
+
1084
+ # Add allowed fields to the body
1085
+ for field in (allowed_fields.keys() & data.keys()):
1086
+ payload['Data'][0].update({allowed_fields[field]: data[field]})
1087
+
1088
+ response = requests.put(url=f"{self.url}mzktml/{data['sickleave_id']}",
1089
+ headers=self._get_headers_allsol(),
1090
+ data=json.dumps(payload))
1091
+ response.raise_for_status()
1092
+ return response
1093
+
1094
+ @staticmethod
1095
+ def __check_fields(data: dict, required_fields: List):
1096
+ for field in required_fields:
1097
+ # Check if the field is present
1098
+ if field not in data:
1099
+ raise ValueError(f'Field {field} is required. Required fields are: {tuple(required_fields)}')
1100
+
1101
+ # Check if the value of the field is None or an empty string
1102
+ if data[field] is None or data[field] == '':
1103
+ raise ValueError(f'Field {field} cannot be empty or None. Required fields are: {tuple(required_fields)}')
1104
+
1105
+
1106
+ def format_dates(df: pd.DataFrame, date_cols: List[str]) -> pd.DataFrame:
1107
+ """
1108
+ Parse the columns in *date_cols* to pandas datetime and format them back
1109
+ to ISO‑8601 strings (YYYY‑MM‑DD). Missing/invalid values become "".
1110
+ The function mutates *df* in‑place and also returns it for convenience.
1111
+ """
1112
+ for col in date_cols:
1113
+ df[col] = pd.to_datetime(df[col], errors="coerce")
1114
+ df[col] = df[col].dt.strftime("%Y-%m-%d").fillna("")
1115
+ return df
1116
+
1117
+
1118
+ def build_unique_key(
1119
+ df: pd.DataFrame, *, id_col: str, date_col: str, key_col: str
1120
+ ) -> pd.DataFrame:
1121
+ """
1122
+ Construct a textual unique key of the form <id>_<YYYY‑MM‑DD>.
1123
+ - *id_col* : column holding a unique employee or entity identifier
1124
+ - *date_col* : column with a (string or datetime) date
1125
+ - *key_col* : name of the column to create/replace
1126
+ Mutates *df* in‑place and returns it.
1127
+ """
1128
+ df[key_col] = df[id_col].astype(str) + "_" + df[date_col].astype(str)
1129
+ return df
1130
+
1131
+
1132
+ # ---------------------------------------------------------------------------
1133
+ # OPTIONAL: duplicate‑partial‑rows logger
1134
+ # ---------------------------------------------------------------------------
1135
+ def log_duplicate_partials(
1136
+ df_partial: pd.DataFrame,
1137
+ write_log: Callable[..., None],
1138
+ subset: str | List[str] = "unique_key_partial",
1139
+ ) -> None:
1140
+ """
1141
+ Detect rows that share the same *subset* key(s) and send a readable
1142
+ message to *write_log* for each duplicate found.
1143
+ Parameters
1144
+ ----------
1145
+ df_partial : DataFrame
1146
+ The partial‑sick‑leave DataFrame.
1147
+ write_log : callable
1148
+ Typically TaskScheduler.write_execution_log or any function that
1149
+ accepts message, data and loglevel keyword arguments.
1150
+ subset : str | list[str]
1151
+ Column(s) that must be unique; defaults to 'unique_key_partial'.
1152
+ """
1153
+ dupes = df_partial[df_partial.duplicated(subset=subset, keep=False)]
1154
+ for _, row in dupes.iterrows():
1155
+ write_log(message=
1156
+ (
1157
+ "Duplicate partial sick‑leave record — "
1158
+ f"employee_id_afas={row.get('employee_id_afas')} "
1159
+ f"employee_code={row.get('employee_code')} "
1160
+ f"key={row.get(subset)}"
1161
+ ), data=None, loglevel="INFO"
1162
+ )
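
For orientation, the wrapper above is driven from the class interface: credentials and token handling come from the BrynQ interface configuration, the get_* readers page through the API 500 records at a time and return the concatenated "Data" entries, and the create_*/update_* writers validate required fields, build a {"Data": [...]} payload and return the raw requests.Response. The snippet below is a minimal usage sketch, not taken from the package itself; the interface id, filter expression and field values are illustrative, and it assumes AllSolutions is exposed at the package root (the module above is brynq_sdk_allsolutions/__init__.py according to SOURCES.txt).

    from brynq_sdk_allsolutions import AllSolutions

    # Illustrative values; a real interface_id comes from the BrynQ platform and
    # the freeform filter string follows whatever syntax the All Solutions REST API accepts.
    all_solutions = AllSolutions(interface_id=123, debug=False)

    # Readers return a list of dicts (the concatenated 'Data' pages).
    employees = all_solutions.get_employees(filter="ab02.persnr='1001'")
    sick_leave = all_solutions.get_detailed_sickleave()

    # Writers return the requests.Response object, so the caller can inspect it.
    response = all_solutions.update_employee(data={
        "employee_id": "12345",             # All Solutions resource id of the employee
        "email_work": "j.doe@example.com",  # mapped to ab02.email-int
        "nickname": "Jane",                 # mapped to ab02.roepnaam
    })
    response.raise_for_status()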
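
The trailing helpers (format_dates, build_unique_key, log_duplicate_partials) are plain pandas utilities rather than methods on the class. The sketch below shows how they might fit together, assuming they are importable at module level; the frame contents and the write_log stub are made up for illustration, and a real logger such as TaskScheduler.write_execution_log would normally be passed in.

    import pandas as pd

    from brynq_sdk_allsolutions import build_unique_key, format_dates, log_duplicate_partials

    # Hypothetical frame; the column names mirror the keys produced by
    # get_detailed_sickleave above. The second row is a deliberate duplicate.
    df_partial = pd.DataFrame([
        {"employee_code": "1001", "employee_id_afas": "A1",
         "partial_start_date": "2024-01-01", "percentage": 50},
        {"employee_code": "1001", "employee_id_afas": "A1",
         "partial_start_date": "2024-01-01", "percentage": 50},
    ])

    format_dates(df_partial, date_cols=["partial_start_date"])
    build_unique_key(df_partial, id_col="employee_code",
                     date_col="partial_start_date", key_col="unique_key_partial")

    # Stub logger accepting the message/data/loglevel keywords that
    # log_duplicate_partials passes through.
    def write_log(message, data=None, loglevel="INFO"):
        print(loglevel, message)

    log_duplicate_partials(df_partial, write_log=write_log)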
@@ -0,0 +1,10 @@
1
+ Metadata-Version: 1.0
2
+ Name: brynq-sdk-allsolutions
3
+ Version: 1.0.1
4
+ Summary: All solutions wrapper from BrynQ
5
+ Home-page: UNKNOWN
6
+ Author: BrynQ
7
+ Author-email: support@brynq.com
8
+ License: BrynQ License
9
+ Description: All solutions wrapper from BrynQ
10
+ Platform: UNKNOWN
@@ -0,0 +1,8 @@
1
+ setup.py
2
+ brynq_sdk_allsolutions/__init__.py
3
+ brynq_sdk_allsolutions.egg-info/PKG-INFO
4
+ brynq_sdk_allsolutions.egg-info/SOURCES.txt
5
+ brynq_sdk_allsolutions.egg-info/dependency_links.txt
6
+ brynq_sdk_allsolutions.egg-info/not-zip-safe
7
+ brynq_sdk_allsolutions.egg-info/requires.txt
8
+ brynq_sdk_allsolutions.egg-info/top_level.txt
@@ -0,0 +1,2 @@
1
+ brynq-sdk-brynq>=2
2
+ pandas<3.0.0,>=2.2.0
@@ -0,0 +1 @@
1
+ brynq_sdk_allsolutions
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
@@ -0,0 +1,17 @@
1
+ from setuptools import setup, find_namespace_packages
2
+
3
+ setup(
4
+ name='brynq_sdk_allsolutions',
5
+ version='1.0.1',
6
+ description='All solutions wrapper from BrynQ',
7
+ long_description='All solutions wrapper from BrynQ',
8
+ author='BrynQ',
9
+ author_email='support@brynq.com',
10
+ packages=find_namespace_packages(include=['brynq_sdk*']),
11
+ license='BrynQ License',
12
+ install_requires=[
13
+ 'brynq-sdk-brynq>=2',
14
+ 'pandas>=2.2.0,<3.0.0',
15
+ ],
16
+ zip_safe=False,
17
+ )