ositah 25.6.dev1-py3-none-any.whl → 25.9.dev1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of ositah might be problematic.
- ositah/app.py +17 -17
- ositah/apps/analysis.py +785 -785
- ositah/apps/configuration/callbacks.py +916 -916
- ositah/apps/configuration/main.py +546 -546
- ositah/apps/configuration/parameters.py +74 -74
- ositah/apps/configuration/tools.py +112 -112
- ositah/apps/export.py +1208 -1191
- ositah/apps/validation/callbacks.py +240 -240
- ositah/apps/validation/main.py +89 -89
- ositah/apps/validation/parameters.py +25 -25
- ositah/apps/validation/tables.py +646 -646
- ositah/apps/validation/tools.py +552 -552
- ositah/assets/arrow_down_up.svg +3 -3
- ositah/assets/ositah.css +53 -53
- ositah/assets/sort_ascending.svg +4 -4
- ositah/assets/sort_descending.svg +5 -5
- ositah/assets/sorttable.js +499 -499
- ositah/main.py +449 -449
- ositah/ositah.example.cfg +229 -229
- ositah/static/style.css +53 -53
- ositah/templates/base.html +22 -22
- ositah/templates/bootstrap_login.html +38 -38
- ositah/templates/login_form.html +26 -26
- ositah/utils/agents.py +124 -124
- ositah/utils/authentication.py +287 -287
- ositah/utils/cache.py +19 -19
- ositah/utils/core.py +13 -13
- ositah/utils/exceptions.py +64 -64
- ositah/utils/hito_db.py +51 -51
- ositah/utils/hito_db_model.py +253 -253
- ositah/utils/menus.py +339 -339
- ositah/utils/period.py +139 -139
- ositah/utils/projects.py +1178 -1178
- ositah/utils/teams.py +42 -42
- ositah/utils/utils.py +474 -474
- {ositah-25.6.dev1.dist-info → ositah-25.9.dev1.dist-info}/METADATA +149 -150
- ositah-25.9.dev1.dist-info/RECORD +46 -0
- {ositah-25.6.dev1.dist-info → ositah-25.9.dev1.dist-info}/licenses/LICENSE +29 -29
- ositah-25.6.dev1.dist-info/RECORD +0 -46
- {ositah-25.6.dev1.dist-info → ositah-25.9.dev1.dist-info}/WHEEL +0 -0
- {ositah-25.6.dev1.dist-info → ositah-25.9.dev1.dist-info}/entry_points.txt +0 -0
- {ositah-25.6.dev1.dist-info → ositah-25.9.dev1.dist-info}/top_level.txt +0 -0
ositah/utils/projects.py
CHANGED
|
@@ -1,1178 +1,1178 @@
|
|
|
1
|
-
# Helper functions related to projects and time declarations
|
|
2
|
-
|
|
3
|
-
import re
|
|
4
|
-
from datetime import datetime
|
|
5
|
-
from typing import List, Tuple
|
|
6
|
-
|
|
7
|
-
import numpy as np
|
|
8
|
-
import pandas as pd
|
|
9
|
-
from sqlalchemy.orm import joinedload
|
|
10
|
-
|
|
11
|
-
from ositah.utils.agents import get_agents
|
|
12
|
-
from ositah.utils.cache import clear_cached_data
|
|
13
|
-
from ositah.utils.exceptions import InvalidDataSource, InvalidHitoProjectName
|
|
14
|
-
from ositah.utils.hito_db import get_db
|
|
15
|
-
from ositah.utils.period import get_validation_period_data
|
|
16
|
-
from ositah.utils.utils import (
|
|
17
|
-
DAY_HOURS,
|
|
18
|
-
TEAM_LIST_ALL_AGENTS,
|
|
19
|
-
TIME_UNIT_HOURS,
|
|
20
|
-
TIME_UNIT_HOURS_EN,
|
|
21
|
-
TIME_UNIT_HOURS_FR,
|
|
22
|
-
TIME_UNIT_WEEKS,
|
|
23
|
-
TIME_UNIT_WEEKS_EN,
|
|
24
|
-
TIME_UNIT_WEEKS_FR,
|
|
25
|
-
WEEK_HOURS,
|
|
26
|
-
GlobalParams,
|
|
27
|
-
)
|
|
28
|
-
|
|
29
|
-
CATEGORY_DEFAULT = "nsip_project"
|
|
30
|
-
|
|
31
|
-
DATA_SOURCE_HITO = "hito"
|
|
32
|
-
DATA_SOURCE_OSITAH = "ositah"
|
|
33
|
-
|
|
34
|
-
NSIP_CLASS_OTHER_ACTIVITY = "activitensipreferentiel"
|
|
35
|
-
NSIP_CLASS_PROJECT = "projetnsipreferentiel"
|
|
36
|
-
|
|
37
|
-
MASTERPROJECT_DELETED_ACTIVITY = "Disabled"
|
|
38
|
-
MASTERPROJECT_LOCAL_PROJECT = "Local Projects"
|
|
39
|
-
|
|
40
|
-
NSIP_PROJECT_ORDER = 1
|
|
41
|
-
LOCAL_PROJECT_ORDER = 2
|
|
42
|
-
NSIP_ACIVITY_ORDER = 3
|
|
43
|
-
DISABLED_ACTIVITY_ORDER = 9999
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
def hito2ositah_project_name(hito_name):
|
|
47
|
-
"""
|
|
48
|
-
Split a Hito project name into a masterprojet and project name
|
|
49
|
-
|
|
50
|
-
:param hito_name: Hito name with masterprojet and project name separated by a /
|
|
51
|
-
:return: masterprojet and project name
|
|
52
|
-
"""
|
|
53
|
-
masterproject, project_name = hito_name.split(" / ", 2)
|
|
54
|
-
return masterproject, project_name
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
def ositah2hito_project_name(masterproject, project):
|
|
58
|
-
"""
|
|
59
|
-
Build the Hito/NSIP project name from the masterproject and project
|
|
60
|
-
|
|
61
|
-
:param masterproject: masterproject name
|
|
62
|
-
:param project: project name
|
|
63
|
-
:return: Hito/NSIP project fullname
|
|
64
|
-
"""
|
|
65
|
-
return " / ".join([masterproject, project])
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
def nsip2ositah_project_name(masterproject, project):
|
|
69
|
-
"""
|
|
70
|
-
Build the OSITAH project name from the NSIP project name, removing the master project
|
|
71
|
-
name if it is at the head of the NSIP project name, except if the master project name
|
|
72
|
-
and the project name are identical.
|
|
73
|
-
|
|
74
|
-
:param masterproject: masterproject name
|
|
75
|
-
:param project: project name
|
|
76
|
-
:return: OSITAH project name (without the masterproject name)
|
|
77
|
-
"""
|
|
78
|
-
|
|
79
|
-
if project != masterproject:
|
|
80
|
-
m = re.match(rf"{masterproject}\s+\-\s+(?P<project>.*)", project)
|
|
81
|
-
if m:
|
|
82
|
-
project = m.group("project")
|
|
83
|
-
|
|
84
|
-
return project
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
def category_from_activity(category_patterns, activity) -> str:
|
|
88
|
-
"""
|
|
89
|
-
Return the activity category if the activity matches the pattern. Else an empty string.
|
|
90
|
-
Called as a lambda to build the category column.
|
|
91
|
-
|
|
92
|
-
:param category_patterns: category patterns to match against the activity (dict where
|
|
93
|
-
the key is the pattern and the value is the category)
|
|
94
|
-
:param activity: activity name
|
|
95
|
-
:return: category or np.Nan
|
|
96
|
-
"""
|
|
97
|
-
|
|
98
|
-
for pattern, category in category_patterns.items():
|
|
99
|
-
if re.match(pattern.lower(), activity.lower()):
|
|
100
|
-
return category
|
|
101
|
-
|
|
102
|
-
return np.NaN
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
def activity_from_project(project):
|
|
106
|
-
"""
|
|
107
|
-
Return the activity the project belongs to.
|
|
108
|
-
|
|
109
|
-
:param project: project name
|
|
110
|
-
:return: activity name
|
|
111
|
-
"""
|
|
112
|
-
|
|
113
|
-
global_params = GlobalParams()
|
|
114
|
-
|
|
115
|
-
for activity, pattern in global_params.project_categories.items():
|
|
116
|
-
if re.match(pattern, project):
|
|
117
|
-
return activity
|
|
118
|
-
|
|
119
|
-
return CATEGORY_DEFAULT
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
def reference_masterproject(reference_type):
|
|
123
|
-
"""
|
|
124
|
-
Return an OSITAH masterproject for a NSIP reference, based on its type.
|
|
125
|
-
Masterprojects for each type is defined in the configuration. A reference type
|
|
126
|
-
without a match or with an empty value is ignored (np.NaN returned).
|
|
127
|
-
|
|
128
|
-
:param reference_type: NSIP reference type
|
|
129
|
-
:return: matching master project
|
|
130
|
-
"""
|
|
131
|
-
|
|
132
|
-
global_params = GlobalParams()
|
|
133
|
-
|
|
134
|
-
for type_pattern, masterproject in global_params.reference_masterprojects.items():
|
|
135
|
-
if re.match(type_pattern.lower(), reference_type.lower()):
|
|
136
|
-
if len(masterproject) > 0:
|
|
137
|
-
return masterproject
|
|
138
|
-
else:
|
|
139
|
-
return np.NaN
|
|
140
|
-
|
|
141
|
-
return np.NaN
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
def time_unit(category, short=False, english=True, parenthesis=False) -> str:
|
|
145
|
-
"""
|
|
146
|
-
Return the time unit as defined in the configuration as a string. If the category/column is
|
|
147
|
-
not in the configuration, return an empty string.
|
|
148
|
-
|
|
149
|
-
:param category: project category/class
|
|
150
|
-
:param short: if true, return abbreviated unit names
|
|
151
|
-
:param english: return english unit names if true. Also implies short=False
|
|
152
|
-
:param parenthesis: if True, enclose the string in ()
|
|
153
|
-
:return: time unit for the category as a string
|
|
154
|
-
"""
|
|
155
|
-
|
|
156
|
-
global_params = GlobalParams()
|
|
157
|
-
|
|
158
|
-
if english:
|
|
159
|
-
unit_w = TIME_UNIT_WEEKS_EN
|
|
160
|
-
unit_h = TIME_UNIT_HOURS_EN
|
|
161
|
-
else:
|
|
162
|
-
if short:
|
|
163
|
-
unit_w = "sem."
|
|
164
|
-
unit_h = "h"
|
|
165
|
-
else:
|
|
166
|
-
unit_w = TIME_UNIT_WEEKS_FR
|
|
167
|
-
unit_h = TIME_UNIT_HOURS_FR
|
|
168
|
-
|
|
169
|
-
if category in global_params.time_unit:
|
|
170
|
-
if global_params.time_unit[category] == TIME_UNIT_WEEKS:
|
|
171
|
-
unit_str = unit_w
|
|
172
|
-
elif global_params.time_unit[category] == TIME_UNIT_HOURS:
|
|
173
|
-
unit_str = unit_h
|
|
174
|
-
else:
|
|
175
|
-
raise Exception(
|
|
176
|
-
(
|
|
177
|
-
f"Unsupported time unit '{global_params.time_unit[category]}'"
|
|
178
|
-
f" for category {category}"
|
|
179
|
-
)
|
|
180
|
-
)
|
|
181
|
-
else:
|
|
182
|
-
return ""
|
|
183
|
-
|
|
184
|
-
if parenthesis:
|
|
185
|
-
return f"({unit_str})"
|
|
186
|
-
else:
|
|
187
|
-
return unit_str
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
def category_time_and_unit(category, hours, short=True, english=False) -> Tuple[int, str]:
|
|
191
|
-
"""
|
|
192
|
-
Return the rounded category time in the appropriate unit and the category time unit
|
|
193
|
-
|
|
194
|
-
:param category: project category/class
|
|
195
|
-
:param hours: number of hours
|
|
196
|
-
:param short: if true, return abbreviated unit names
|
|
197
|
-
:param english: return english unit names if true. Also implies short=False
|
|
198
|
-
:return: project time, project unit
|
|
199
|
-
"""
|
|
200
|
-
|
|
201
|
-
global_params = GlobalParams()
|
|
202
|
-
|
|
203
|
-
unit = time_unit(category, short, english)
|
|
204
|
-
|
|
205
|
-
if global_params.time_unit[category] == "w":
|
|
206
|
-
declared_time = f"{int(round(hours / WEEK_HOURS))}"
|
|
207
|
-
else:
|
|
208
|
-
declared_time = f"{int(round(hours))}"
|
|
209
|
-
|
|
210
|
-
return declared_time, unit
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
def project_time(project, hours):
|
|
214
|
-
"""
|
|
215
|
-
Return the rounded project time in the appropriate unit and the project time unit
|
|
216
|
-
|
|
217
|
-
:param project: project name
|
|
218
|
-
:param hours: number of hours
|
|
219
|
-
:return: project time, abbreviated project unit
|
|
220
|
-
"""
|
|
221
|
-
|
|
222
|
-
return category_time_and_unit(activity_from_project(project), hours)
|
|
223
|
-
|
|
224
|
-
|
|
225
|
-
def get_team_projects(
|
|
226
|
-
team,
|
|
227
|
-
team_selection_date,
|
|
228
|
-
period_date: datetime,
|
|
229
|
-
source=DATA_SOURCE_HITO,
|
|
230
|
-
use_cache: bool = True,
|
|
231
|
-
):
|
|
232
|
-
"""
|
|
233
|
-
Query the Hito database and return a dataframe will all the project contributions for a given
|
|
234
|
-
team. The dataframe has one row for each each agent contribution to each project.
|
|
235
|
-
|
|
236
|
-
:param team: selected team or TEAM_LIST_ALL_AGENTS for all teams
|
|
237
|
-
:param team_selection_date: last time the team selection was changed
|
|
238
|
-
:param period_date: a date that must be inside the declaration period
|
|
239
|
-
:param source: whether to use Hito (non validated) or OSITAH (validated) as a data source
|
|
240
|
-
:param use_cache: if true, use the cache if defined and up-to-date or update it with the
|
|
241
|
-
new declarations
|
|
242
|
-
:return: dataframe or None if the query returned no entry
|
|
243
|
-
"""
|
|
244
|
-
|
|
245
|
-
from ositah.utils.hito_db_model import (
|
|
246
|
-
ActiviteDetail,
|
|
247
|
-
Agent,
|
|
248
|
-
OSITAHProjectDeclaration,
|
|
249
|
-
OSITAHValidation,
|
|
250
|
-
Projet,
|
|
251
|
-
Team,
|
|
252
|
-
)
|
|
253
|
-
|
|
254
|
-
global_params = GlobalParams()
|
|
255
|
-
columns = global_params.columns
|
|
256
|
-
session_data = global_params.session_data
|
|
257
|
-
db = get_db()
|
|
258
|
-
|
|
259
|
-
validation_period = get_validation_period_data(period_date)
|
|
260
|
-
|
|
261
|
-
# Check if there is a cached version
|
|
262
|
-
if session_data.project_declarations is not None and use_cache:
|
|
263
|
-
if (
|
|
264
|
-
session_data.project_declarations_source is None
|
|
265
|
-
or source != session_data.project_declarations_source
|
|
266
|
-
or datetime.fromisoformat(team_selection_date) > session_data.cache_date
|
|
267
|
-
):
|
|
268
|
-
# Cache must be refreshed if the selected source doesn't match the cached one or if
|
|
269
|
-
# the team has been modified since the cache was loaded (required for multi-worker
|
|
270
|
-
# configurations as the team selection does not necessarily happen on the same worker
|
|
271
|
-
# than the later processing). In a multi-worker configuration is used it may also
|
|
272
|
-
# happen that the declaration source is not defined if it was initially initialised
|
|
273
|
-
# on another worker
|
|
274
|
-
clear_cached_data()
|
|
275
|
-
else:
|
|
276
|
-
return session_data.project_declarations
|
|
277
|
-
|
|
278
|
-
if source == DATA_SOURCE_OSITAH:
|
|
279
|
-
# The query relies on the fact that only one validation entry can be in the validated
|
|
280
|
-
# state for a given period, something enforced by the declaration validation.
|
|
281
|
-
# When a team is specified, display all projects from this team and the children teams
|
|
282
|
-
query = (
|
|
283
|
-
OSITAHProjectDeclaration.query.join(
|
|
284
|
-
OSITAHValidation,
|
|
285
|
-
OSITAHProjectDeclaration.validation_id == OSITAHValidation.id,
|
|
286
|
-
)
|
|
287
|
-
.join(Agent, Agent.id == OSITAHValidation.agent_id)
|
|
288
|
-
.join(Team, Team.id == Agent.team_id)
|
|
289
|
-
.add_entity(Agent)
|
|
290
|
-
.add_entity(Team)
|
|
291
|
-
.add_entity(OSITAHValidation)
|
|
292
|
-
.filter(OSITAHValidation.validated)
|
|
293
|
-
.filter(OSITAHValidation.period_id == validation_period.id)
|
|
294
|
-
)
|
|
295
|
-
if team != TEAM_LIST_ALL_AGENTS:
|
|
296
|
-
query = query.filter(Team.nom.ilike(f"{team}%"))
|
|
297
|
-
declarations = pd.read_sql(query.statement, db.session.bind)
|
|
298
|
-
if len(declarations) == 0:
|
|
299
|
-
return None
|
|
300
|
-
declarations.rename(columns={"id": columns["activity_id"]}, inplace=True)
|
|
301
|
-
declarations.rename(columns={"id_1": columns["agent_id"]}, inplace=True)
|
|
302
|
-
declarations.rename(columns={"nom_1": columns["team"]}, inplace=True)
|
|
303
|
-
declarations.rename(columns={"hours": columns["hours"]}, inplace=True)
|
|
304
|
-
declarations.rename(columns={"id_3": "validation_id"}, inplace=True)
|
|
305
|
-
declarations.rename(columns={"timestamp": "validation_time"}, inplace=True)
|
|
306
|
-
# Drop statut column to avoid conflicts in future merge with the Agent table
|
|
307
|
-
declarations.drop(columns=["statut"], inplace=True)
|
|
308
|
-
# Ensure that email_auth is defined and if it is not, replace it by the email.
|
|
309
|
-
declarations.loc[declarations[columns["email_auth"]].isna(), columns["email_auth"]] = (
|
|
310
|
-
declarations[columns["email"]]
|
|
311
|
-
)
|
|
312
|
-
declarations[columns["activity"]] = declarations.apply(
|
|
313
|
-
lambda row: ositah2hito_project_name(
|
|
314
|
-
row[columns["masterproject"]], row[columns["project"]]
|
|
315
|
-
),
|
|
316
|
-
axis=1,
|
|
317
|
-
)
|
|
318
|
-
|
|
319
|
-
elif source == DATA_SOURCE_HITO:
|
|
320
|
-
# For team names, we want to keep the agent team name instead of the team_name in
|
|
321
|
-
# activity_details so it must be specified explicitely in the join with Team table
|
|
322
|
-
query = (
|
|
323
|
-
ActiviteDetail.query.join(Projet)
|
|
324
|
-
.join(Agent)
|
|
325
|
-
.join(Team, Team.id == Agent.team_id)
|
|
326
|
-
.add_entity(Projet)
|
|
327
|
-
.add_entity(Agent)
|
|
328
|
-
.add_entity(Team)
|
|
329
|
-
.filter(
|
|
330
|
-
ActiviteDetail.date >= validation_period.start_date,
|
|
331
|
-
ActiviteDetail.date <= validation_period.end_date,
|
|
332
|
-
)
|
|
333
|
-
)
|
|
334
|
-
if team != TEAM_LIST_ALL_AGENTS:
|
|
335
|
-
query = query.filter(Team.nom.ilike(f"{team}%"))
|
|
336
|
-
daily_declarations = pd.read_sql(query.statement, db.session.bind)
|
|
337
|
-
if len(daily_declarations) == 0:
|
|
338
|
-
return None
|
|
339
|
-
# Pandas add a suffix to duplicate column names, the first one being unchanged, the
|
|
340
|
-
# second being suffixed _1...
|
|
341
|
-
daily_declarations.drop(columns=["id", "id_1", "id_2"], inplace=True)
|
|
342
|
-
daily_declarations.rename(columns={"agent_id": columns["agent_id"]}, inplace=True)
|
|
343
|
-
daily_declarations.rename(columns={"libelle": columns["activity"]}, inplace=True)
|
|
344
|
-
daily_declarations.rename(columns={"nom_1": columns["team"]}, inplace=True)
|
|
345
|
-
for column in [columns["hours"], columns["percent"]]:
|
|
346
|
-
daily_declarations[column] = daily_declarations[column].astype(float)
|
|
347
|
-
# Ensure that email_auth is defined and if it is not, replace it by the email. If left
|
|
348
|
-
# undefined, the entries will not be present in the pivot table as it is part of the index.
|
|
349
|
-
daily_declarations.loc[
|
|
350
|
-
daily_declarations[columns["email_auth"]].isna(), columns["email_auth"]
|
|
351
|
-
] = daily_declarations[columns["email"]]
|
|
352
|
-
# Rebuild agent quotite by comparing the time declared with the percent computed by Hito
|
|
353
|
-
# based on the quotite
|
|
354
|
-
daily_declarations[columns["quotite"]] = (
|
|
355
|
-
daily_declarations[columns["hours"]] / DAY_HOURS * 100
|
|
356
|
-
) / daily_declarations[columns["percent"]]
|
|
357
|
-
global_declarations_pt = pd.pivot_table(
|
|
358
|
-
daily_declarations,
|
|
359
|
-
index=[
|
|
360
|
-
columns["lastname"],
|
|
361
|
-
columns["firstname"],
|
|
362
|
-
columns["activity"],
|
|
363
|
-
columns["activity_id"],
|
|
364
|
-
columns["team"],
|
|
365
|
-
columns["agent_id"],
|
|
366
|
-
columns["email_auth"],
|
|
367
|
-
columns["email"],
|
|
368
|
-
],
|
|
369
|
-
values=[columns["hours"], columns["quotite"]],
|
|
370
|
-
aggfunc={columns["hours"]: "sum", columns["quotite"]: "mean"},
|
|
371
|
-
)
|
|
372
|
-
declarations = pd.DataFrame(global_declarations_pt.to_records())
|
|
373
|
-
declarations[[columns["masterproject"], columns["project"]]] = declarations[
|
|
374
|
-
columns["activity"]
|
|
375
|
-
].str.split(" / ", n=1, expand=True)
|
|
376
|
-
# An entry in the pseudo master project MASTERPROJECT_DELETED_ACTIVITY is a special
|
|
377
|
-
# case corresponding to deleted NSIP projects: the real name is in the project part that
|
|
378
|
-
# must be parsed as for any other project
|
|
379
|
-
declarations["project_saved"] = np.NaN
|
|
380
|
-
declarations["project_saved"] = declarations["project_saved"].astype("object")
|
|
381
|
-
declarations.loc[
|
|
382
|
-
declarations[columns["masterproject"]] == MASTERPROJECT_DELETED_ACTIVITY,
|
|
383
|
-
"project_saved",
|
|
384
|
-
] = declarations[columns["project"]]
|
|
385
|
-
# Not sure why the following line doesn't work (masterproject and project set to NaN
|
|
386
|
-
# if no row matches the indexing condition... An issue has been open:
|
|
387
|
-
# https://github.com/pandas-dev/pandas/issues/44726.
|
|
388
|
-
# declarations.loc[
|
|
389
|
-
# declarations.project_saved.notna(),
|
|
390
|
-
# [columns["masterproject"], columns["project"]],
|
|
391
|
-
# ] = declarations.project_saved.str.split(" / ", n=1, expand=True)
|
|
392
|
-
#
|
|
393
|
-
# The following workaround fails if project_saved contains only np.NaN. It is a known
|
|
394
|
-
# issue in Panda 1.3.4, see https://github.com/pandas-dev/pandas/issues/35807
|
|
395
|
-
# declarations[
|
|
396
|
-
# ["newmaster", "newproject"]
|
|
397
|
-
# ] = declarations.project_saved.str.split(" / ", n=1, expand=True)
|
|
398
|
-
#
|
|
399
|
-
# Workaround based on
|
|
400
|
-
# https://github.com/pandas-dev/pandas/issues/35807#issuecomment-676912441. If no row
|
|
401
|
-
# matches the condition, only one column is created thus the need to check they all
|
|
402
|
-
# exist.
|
|
403
|
-
tmp_columns = ["newmaster", "newproject"]
|
|
404
|
-
saved_projects = (
|
|
405
|
-
declarations["project_saved"]
|
|
406
|
-
.str.split("/", expand=True, n=len(tmp_columns) - 1)
|
|
407
|
-
.rename(columns={k: name for k, name in enumerate(tmp_columns)})
|
|
408
|
-
)
|
|
409
|
-
for column in tmp_columns:
|
|
410
|
-
if column not in saved_projects.columns:
|
|
411
|
-
saved_projects[column] = np.NaN
|
|
412
|
-
declarations = declarations.join(saved_projects)
|
|
413
|
-
declarations.loc[declarations.project_saved.notna(), columns["masterproject"]] = (
|
|
414
|
-
declarations.newmaster
|
|
415
|
-
)
|
|
416
|
-
declarations.loc[declarations.project_saved.notna(), columns["project"]] = (
|
|
417
|
-
declarations.newproject
|
|
418
|
-
)
|
|
419
|
-
declarations.drop(columns=["newmaster", "newproject"], inplace=True)
|
|
420
|
-
declarations.loc[declarations.project_saved.notna(), columns["activity"]] = (
|
|
421
|
-
declarations.project_saved
|
|
422
|
-
)
|
|
423
|
-
|
|
424
|
-
# Detect project names not matching the format "masterproject / project"
|
|
425
|
-
invalid_hito_projects = declarations.loc[declarations[columns["project"]].isnull()]
|
|
426
|
-
if not invalid_hito_projects.empty:
|
|
427
|
-
raise InvalidHitoProjectName(
|
|
428
|
-
pd.Series(invalid_hito_projects[columns["masterproject"]]).unique()
|
|
429
|
-
)
|
|
430
|
-
|
|
431
|
-
else:
|
|
432
|
-
raise InvalidDataSource(source)
|
|
433
|
-
|
|
434
|
-
declarations[columns["fullname"]] = declarations[
|
|
435
|
-
[columns["lastname"], columns["firstname"]]
|
|
436
|
-
].agg(" ".join, axis=1)
|
|
437
|
-
declarations[columns["category"]] = declarations.apply(
|
|
438
|
-
lambda row: category_from_activity(
|
|
439
|
-
global_params.category_patterns, row[columns["activity"]]
|
|
440
|
-
),
|
|
441
|
-
axis=1,
|
|
442
|
-
)
|
|
443
|
-
declarations.loc[declarations[columns["category"]].isna(), "category"] = CATEGORY_DEFAULT
|
|
444
|
-
|
|
445
|
-
# Check quotite < 50% and flag the entry as suspect (generally means confusion between quotite
|
|
446
|
-
# and percent during declaration)
|
|
447
|
-
declarations["suspect"] = declarations[columns["quotite"]] < 0.5
|
|
448
|
-
|
|
449
|
-
if use_cache:
|
|
450
|
-
session_data.set_project_declarations(declarations, source)
|
|
451
|
-
|
|
452
|
-
return declarations
|
|
453
|
-
|
|
454
|
-
|
|
455
|
-
def get_all_hito_activities(project_activity: bool):
|
|
456
|
-
"""
|
|
457
|
-
Retrieve all projects or all activities defined in Hito with their associated teams
|
|
458
|
-
|
|
459
|
-
:param project_activity: if true, return all projects, else all Hito activities
|
|
460
|
-
:return: dataframe
|
|
461
|
-
"""
|
|
462
|
-
|
|
463
|
-
from ositah.utils.hito_db_model import Activite, Projet
|
|
464
|
-
|
|
465
|
-
global_params = GlobalParams()
|
|
466
|
-
session_data = global_params.session_data
|
|
467
|
-
db = get_db()
|
|
468
|
-
|
|
469
|
-
# Check if there is a cached version
|
|
470
|
-
if session_data.get_hito_activities(project_activity) is not None:
|
|
471
|
-
return session_data.get_hito_activities(project_activity)
|
|
472
|
-
|
|
473
|
-
else:
|
|
474
|
-
if project_activity:
|
|
475
|
-
Activity = Projet
|
|
476
|
-
else:
|
|
477
|
-
Activity = Activite
|
|
478
|
-
|
|
479
|
-
query = Activity.query.options(joinedload(Activity.teams))
|
|
480
|
-
activities = pd.read_sql(query.statement, db.session.bind)
|
|
481
|
-
activities[["masterproject", "project"]] = activities.libelle.str.split(
|
|
482
|
-
" / ", n=1, expand=True
|
|
483
|
-
)
|
|
484
|
-
activities.rename(columns={"description_1": "team_description"}, inplace=True)
|
|
485
|
-
activities.rename(columns={"nom": "team_name"}, inplace=True)
|
|
486
|
-
|
|
487
|
-
session_data.set_hito_activities(activities, project_activity)
|
|
488
|
-
|
|
489
|
-
return activities
|
|
490
|
-
|
|
491
|
-
|
|
492
|
-
def build_projects_data(team, team_selection_date, period_date: str, source):
|
|
493
|
-
"""
|
|
494
|
-
Build the project list contributed by the selected team and return it as a dataframe
|
|
495
|
-
|
|
496
|
-
:param team: selected team
|
|
497
|
-
:param team_selection_date: last time the team selection was changed
|
|
498
|
-
:param period_date: a date that must be inside the declaration period
|
|
499
|
-
:param source: whether to use Hito (non validated) or OSITAH (validated) as a data source
|
|
500
|
-
:return: dataframe with projects data, dataframe with agent declarations
|
|
501
|
-
"""
|
|
502
|
-
|
|
503
|
-
global_params = GlobalParams()
|
|
504
|
-
columns = global_params.columns
|
|
505
|
-
session_data = global_params.session_data
|
|
506
|
-
|
|
507
|
-
declaration_list = get_team_projects(team, team_selection_date, period_date, source)
|
|
508
|
-
if declaration_list is None:
|
|
509
|
-
return None, None
|
|
510
|
-
|
|
511
|
-
projects_data = session_data.projects_data
|
|
512
|
-
if projects_data is None:
|
|
513
|
-
projects_data_pt = pd.pivot_table(
|
|
514
|
-
declaration_list,
|
|
515
|
-
index=[
|
|
516
|
-
columns["masterproject"],
|
|
517
|
-
columns["project"],
|
|
518
|
-
columns["activity"],
|
|
519
|
-
columns["category"],
|
|
520
|
-
],
|
|
521
|
-
values=[columns["hours"]],
|
|
522
|
-
aggfunc={columns["hours"]: "sum"},
|
|
523
|
-
)
|
|
524
|
-
projects_data = pd.DataFrame(projects_data_pt.to_records())
|
|
525
|
-
projects_data[columns["hours"]] = np.round(projects_data[columns["hours"]]).astype("int")
|
|
526
|
-
projects_data[columns["weeks"]] = np.round(projects_data[columns["hours"]] / WEEK_HOURS, 1)
|
|
527
|
-
short_name_len = 25
|
|
528
|
-
projects_data["project_short"] = projects_data[columns["project"]]
|
|
529
|
-
projects_data.loc[
|
|
530
|
-
projects_data["project_short"].str.len() > short_name_len, "project_short"
|
|
531
|
-
] = projects_data["project_short"].str.slice_replace(start=short_name_len - 4, repl="...")
|
|
532
|
-
session_data.projects_data = projects_data
|
|
533
|
-
|
|
534
|
-
return projects_data, declaration_list
|
|
535
|
-
|
|
536
|
-
|
|
537
|
-
def get_hito_nsip_activities(project_activity: bool = True):
|
|
538
|
-
"""
|
|
539
|
-
Return a dataframe with all the NSIP activities defined in Hito. Activities can be
|
|
540
|
-
either projects or "references" (other activities). An activity is considered as a
|
|
541
|
-
NSIP activity if it has a matching entry in Hito referentiel.
|
|
542
|
-
|
|
543
|
-
:param project_activity: if true, return projects, else references
|
|
544
|
-
:return: dataframe
|
|
545
|
-
"""
|
|
546
|
-
|
|
547
|
-
from ositah.utils.hito_db_model import Projet, Referentiel
|
|
548
|
-
|
|
549
|
-
db = get_db()
|
|
550
|
-
|
|
551
|
-
if project_activity:
|
|
552
|
-
project_join_id = Projet.projet_nsip_referentiel_id
|
|
553
|
-
referentiel_class = "projetnsipreferentiel"
|
|
554
|
-
else:
|
|
555
|
-
project_join_id = Projet.activite_nsip_referentiel_id
|
|
556
|
-
referentiel_class = "activitensipreferentiel"
|
|
557
|
-
|
|
558
|
-
activity_query = (
|
|
559
|
-
Projet.query.join(Referentiel, Referentiel.id == project_join_id)
|
|
560
|
-
.add_entity(Referentiel)
|
|
561
|
-
.filter(
|
|
562
|
-
Referentiel.object_class == referentiel_class,
|
|
563
|
-
)
|
|
564
|
-
)
|
|
565
|
-
|
|
566
|
-
activities = pd.read_sql(activity_query.statement, db.session.bind)
|
|
567
|
-
|
|
568
|
-
activities.drop(
|
|
569
|
-
columns=[
|
|
570
|
-
"ordre",
|
|
571
|
-
"projet_nsip_referentiel_id",
|
|
572
|
-
"activite_nsip_referentiel_id",
|
|
573
|
-
],
|
|
574
|
-
inplace=True,
|
|
575
|
-
)
|
|
576
|
-
activities.rename(columns={"id_1": "referentiel_id"}, inplace=True)
|
|
577
|
-
activities.rename(columns={"libelle_1": "nsip_name_id"}, inplace=True)
|
|
578
|
-
|
|
579
|
-
if activities.empty:
|
|
580
|
-
activities[["nsip_master", "nsip_project", "nsip_project_id", "nsip_reference_id"]] = [
|
|
581
|
-
np.NaN,
|
|
582
|
-
np.NaN,
|
|
583
|
-
np.NaN,
|
|
584
|
-
np.NaN,
|
|
585
|
-
]
|
|
586
|
-
else:
|
|
587
|
-
activities[["nsip_master", "nsip_project", "nsip_project_id", "nsip_reference_id"]] = (
|
|
588
|
-
activities.apply(
|
|
589
|
-
lambda v: nsip_activity_name_id(v["nsip_name_id"], v["class"]),
|
|
590
|
-
axis=1,
|
|
591
|
-
result_type="expand",
|
|
592
|
-
)
|
|
593
|
-
)
|
|
594
|
-
activities["nsip_project_id"] = activities["nsip_project_id"].astype(int)
|
|
595
|
-
activities["nsip_reference_id"] = activities["nsip_reference_id"].astype(int)
|
|
596
|
-
|
|
597
|
-
return activities
|
|
598
|
-
|
|
599
|
-
|
|
600
|
-
def get_hito_projects():
|
|
601
|
-
"""
|
|
602
|
-
Return a dataframe with the information about all projects with validated declarations
|
|
603
|
-
in the current declaration period defined in Hito and their relationship to NSIP, if relevant.
|
|
604
|
-
|
|
605
|
-
:return: Hito project dataframe
|
|
606
|
-
"""
|
|
607
|
-
|
|
608
|
-
from ositah.utils.hito_db_model import (
|
|
609
|
-
OSITAHProjectDeclaration,
|
|
610
|
-
OSITAHValidation,
|
|
611
|
-
Projet,
|
|
612
|
-
Referentiel,
|
|
613
|
-
)
|
|
614
|
-
|
|
615
|
-
db = get_db()
|
|
616
|
-
|
|
617
|
-
projects_query = (
|
|
618
|
-
Projet.query.join(Referentiel, Referentiel.id == Projet.projet_nsip_referentiel_id)
|
|
619
|
-
.join(OSITAHProjectDeclaration)
|
|
620
|
-
.join(OSITAHValidation)
|
|
621
|
-
.add_entity(Referentiel)
|
|
622
|
-
.add_entity(OSITAHValidation)
|
|
623
|
-
.filter(
|
|
624
|
-
Referentiel.object_class == "projetnsipreferentiel",
|
|
625
|
-
OSITAHValidation.validated,
|
|
626
|
-
)
|
|
627
|
-
)
|
|
628
|
-
projects = pd.read_sql(projects_query.statement, db.session.bind)
|
|
629
|
-
|
|
630
|
-
activities_query = (
|
|
631
|
-
Projet.query.join(Referentiel, Referentiel.id == Projet.activite_nsip_referentiel_id)
|
|
632
|
-
.join(OSITAHProjectDeclaration)
|
|
633
|
-
.join(OSITAHValidation)
|
|
634
|
-
.add_entity(Referentiel)
|
|
635
|
-
.add_entity(OSITAHValidation)
|
|
636
|
-
.filter(
|
|
637
|
-
Referentiel.object_class == "activitensipreferentiel",
|
|
638
|
-
OSITAHValidation.validated,
|
|
639
|
-
)
|
|
640
|
-
)
|
|
641
|
-
activities = pd.read_sql(activities_query.statement, db.session.bind)
|
|
642
|
-
|
|
643
|
-
projects_activities = pd.concat([projects, activities], ignore_index=True)
|
|
644
|
-
|
|
645
|
-
projects_activities.drop(
|
|
646
|
-
columns=[
|
|
647
|
-
"id_2",
|
|
648
|
-
"ordre",
|
|
649
|
-
"projet_nsip_referentiel_id",
|
|
650
|
-
"activite_nsip_referentiel_id",
|
|
651
|
-
"agent_id",
|
|
652
|
-
],
|
|
653
|
-
inplace=True,
|
|
654
|
-
)
|
|
655
|
-
projects_activities.rename(columns={"id_1": "referentiel_id"}, inplace=True)
|
|
656
|
-
projects_activities.rename(columns={"libelle_1": "nsip_name_id"}, inplace=True)
|
|
657
|
-
projects_activities.drop_duplicates(subset=["id"], inplace=True)
|
|
658
|
-
|
|
659
|
-
if projects_activities.empty:
|
|
660
|
-
projects_activities[
|
|
661
|
-
["nsip_master", "nsip_project", "nsip_project_id", "nsip_reference_id"]
|
|
662
|
-
] = [np.NaN, np.NaN, np.NaN, np.NaN]
|
|
663
|
-
else:
|
|
664
|
-
projects_activities[
|
|
665
|
-
["nsip_master", "nsip_project", "nsip_project_id", "nsip_reference_id"]
|
|
666
|
-
] = projects_activities.apply(
|
|
667
|
-
lambda v: nsip_activity_name_id(v["nsip_name_id"], v["class"]),
|
|
668
|
-
axis=1,
|
|
669
|
-
result_type="expand",
|
|
670
|
-
)
|
|
671
|
-
projects_activities["nsip_project_id"] = projects_activities["nsip_project_id"].astype(int)
|
|
672
|
-
projects_activities["nsip_reference_id"] = projects_activities["nsip_reference_id"].astype(
|
|
673
|
-
int
|
|
674
|
-
)
|
|
675
|
-
|
|
676
|
-
return projects_activities
|
|
677
|
-
|
|
678
|
-
|
|
679
|
-
def nsip_activity_name_id(hito_name: str, type: str) -> List[str]:
|
|
680
|
-
"""
|
|
681
|
-
Split the NISP activity project name in Hito referentiel in 3 parts: masterproject name,
|
|
682
|
-
project name, project ID and return the project ID as the project ID (3d value) or reference
|
|
683
|
-
ID (4th value) depending on the activity type. The unused ID is set to 0 rather than np.NaN
|
|
684
|
-
or pd.NA as the column may be used in merges.
|
|
685
|
-
|
|
686
|
-
:param hito_name: activity name in Hito referentiel
|
|
687
|
-
:param type: referentiel class
|
|
688
|
-
:return:
|
|
689
|
-
"""
|
|
690
|
-
|
|
691
|
-
m = re.match(
|
|
692
|
-
r"(?P<master>.*?)\s+/\s+(?P<project>.*)\s+\(NSIP ID:\s*(?P<id>\w+)\)$",
|
|
693
|
-
hito_name,
|
|
694
|
-
)
|
|
695
|
-
if m:
|
|
696
|
-
try:
|
|
697
|
-
_ = int(m.group("id"))
|
|
698
|
-
except ValueError:
|
|
699
|
-
print(
|
|
700
|
-
(
|
|
701
|
-
f"ERROR: invalid NSIP ID in Hito referentiel for '{m.group('master')} /"
|
|
702
|
-
f" {m.group('project')}' (ID={m.group('id')})"
|
|
703
|
-
)
|
|
704
|
-
)
|
|
705
|
-
return m.group("master"), m.group("project"), 0, 0
|
|
706
|
-
if type == NSIP_CLASS_PROJECT:
|
|
707
|
-
project_id = m.group("id")
|
|
708
|
-
reference_id = 0
|
|
709
|
-
else:
|
|
710
|
-
project_id = 0
|
|
711
|
-
reference_id = m.group("id")
|
|
712
|
-
return m.group("master"), m.group("project"), project_id, reference_id
|
|
713
|
-
else:
|
|
714
|
-
print(
|
|
715
|
-
(
|
|
716
|
-
f"ERROR: invalid Hito referentiel entry format, cannot be parsed as"
|
|
717
|
-
f" master/project/id ({hito_name})"
|
|
718
|
-
)
|
|
719
|
-
)
|
|
720
|
-
return np.NaN, np.NaN, 0, 0
|
|
721
|
-
|
|
722
|
-
|
|
723
|
-
def get_nsip_declarations(period_date: str, team: str):
|
|
724
|
-
"""
|
|
725
|
-
Return the NSIP declaration list for the declaration period matching a given date (the
|
|
726
|
-
date must be included in the period) as a dataframe
|
|
727
|
-
|
|
728
|
-
:param period_date: date that must be inside the period
|
|
729
|
-
:param team: selected team
|
|
730
|
-
:return: declaration list as a dataframe
|
|
731
|
-
"""
|
|
732
|
-
|
|
733
|
-
global_params = GlobalParams()
|
|
734
|
-
|
|
735
|
-
if global_params.nsip:
|
|
736
|
-
declarations = pd.json_normalize(global_params.nsip.get_declarations(period_date))
|
|
737
|
-
if declarations.empty:
|
|
738
|
-
return declarations
|
|
739
|
-
|
|
740
|
-
declarations.rename(columns={"id": "id_declaration"}, inplace=True)
|
|
741
|
-
# Set NaN to 0 in reference as np.NaN is a float and prevent casting to int. As it will
|
|
742
|
-
# be used in a merge better to have a 0 than a NaN.
|
|
743
|
-
if "project.id" in declarations.columns:
|
|
744
|
-
declarations.loc[declarations["project.id"].isna(), "project.id"] = 0
|
|
745
|
-
declarations["project.id"] = declarations["project.id"].astype(int)
|
|
746
|
-
else:
|
|
747
|
-
declarations["project.id"] = 0
|
|
748
|
-
if "reference.id" in declarations.columns:
|
|
749
|
-
declarations.loc[declarations["reference.id"].isna(), "reference.id"] = 0
|
|
750
|
-
declarations["reference.id"] = declarations["reference.id"].astype(int)
|
|
751
|
-
else:
|
|
752
|
-
declarations["reference.id"] = 0
|
|
753
|
-
declarations["nsip_fullname"] = (
|
|
754
|
-
declarations["agent.lastname"] + " " + declarations["agent.firstname"]
|
|
755
|
-
)
|
|
756
|
-
|
|
757
|
-
if team != TEAM_LIST_ALL_AGENTS:
|
|
758
|
-
team_agents = get_agents(period_date, team)
|
|
759
|
-
agent_emails = team_agents["email_auth"]
|
|
760
|
-
declarations = declarations.merge(
|
|
761
|
-
agent_emails,
|
|
762
|
-
how="inner",
|
|
763
|
-
left_on="agent.email",
|
|
764
|
-
right_on="email_auth",
|
|
765
|
-
suffixes=[None, "_agent"],
|
|
766
|
-
)
|
|
767
|
-
|
|
768
|
-
return declarations
|
|
769
|
-
|
|
770
|
-
else:
|
|
771
|
-
return None
|
|
772
|
-
|
|
773
|
-
|
|
774
|
-
def get_nsip_activities(project_activity: bool):
|
|
775
|
-
"""
|
|
776
|
-
Retrieve laboratory activities defined in NSIP and return them in a dataframe.
|
|
777
|
-
Activities can be either projects or references (other activities).
|
|
778
|
-
|
|
779
|
-
:param project_activity: true for projects, false for other activities
|
|
780
|
-
:return: dataframe or None if NSIP is not configured
|
|
781
|
-
"""
|
|
782
|
-
|
|
783
|
-
global_params = GlobalParams()
|
|
784
|
-
|
|
785
|
-
if global_params.nsip:
|
|
786
|
-
activities = pd.json_normalize(
|
|
787
|
-
global_params.nsip.get_activities(project_activity), record_prefix=True
|
|
788
|
-
)
|
|
789
|
-
if not activities.empty:
|
|
790
|
-
if project_activity:
|
|
791
|
-
activities["ositah_name"] = activities.apply(
|
|
792
|
-
lambda p: nsip2ositah_project_name(p["master_project.name"], p["name"]),
|
|
793
|
-
axis=1,
|
|
794
|
-
)
|
|
795
|
-
else:
|
|
796
|
-
activities["master_project.name"] = activities.apply(
|
|
797
|
-
lambda p: reference_masterproject(p["type"]),
|
|
798
|
-
axis=1,
|
|
799
|
-
)
|
|
800
|
-
activities.drop(
|
|
801
|
-
activities[activities["master_project.name"].isna()].index,
|
|
802
|
-
inplace=True,
|
|
803
|
-
)
|
|
804
|
-
activities["ositah_name"] = activities["name"]
|
|
805
|
-
|
|
806
|
-
return activities
|
|
807
|
-
else:
|
|
808
|
-
return None
|
|
809
|
-
|
|
810
|
-
|
|
811
|
-
def build_activity_libelle(
|
|
812
|
-
nsip_id: str,
|
|
813
|
-
master_project: str,
|
|
814
|
-
project: str,
|
|
815
|
-
):
|
|
816
|
-
"""
|
|
817
|
-
Build Hito project name and referentiel entry name from NSIP master project, project name and
|
|
818
|
-
project id.
|
|
819
|
-
|
|
820
|
-
:param nsip_id: NSIP ID for the project
|
|
821
|
-
:param master_project: master project name
|
|
822
|
-
:param project: project name
|
|
823
|
-
:return: Hito project name, Hito referentiel name
|
|
824
|
-
"""
|
|
825
|
-
|
|
826
|
-
new_project_name = f"{master_project} / {project}"
|
|
827
|
-
new_referentiel_name = f"{new_project_name} (NSIP ID: {nsip_id})"
|
|
828
|
-
return new_project_name, new_referentiel_name
|
|
829
|
-
|
|
830
|
-
|
|
831
|
-
def update_activity_name(
|
|
832
|
-
hito_project_id: str,
|
|
833
|
-
hito_referentiel_id: str,
|
|
834
|
-
nsip_id: str,
|
|
835
|
-
master_project: str,
|
|
836
|
-
project: str,
|
|
837
|
-
):
|
|
838
|
-
"""
|
|
839
|
-
Update a project name in Hito, both in the referentiel and in the project/activity table.
|
|
840
|
-
|
|
841
|
-
:param hito_project_id: Hito project ID
|
|
842
|
-
:param hito_referentiel_id: Hito referentiel ID for the project
|
|
843
|
-
:param nsip_id: NSIP ID for the project
|
|
844
|
-
:param master_project: master project name
|
|
845
|
-
:param project: project name
|
|
846
|
-
:return: 0 if update succeeded, non-zero if an error occured, error_msg if
|
|
847
|
-
an error occured
|
|
848
|
-
"""
|
|
849
|
-
|
|
850
|
-
from ositah.utils.hito_db_model import Projet, Referentiel
|
|
851
|
-
|
|
852
|
-
db = get_db()
|
|
853
|
-
|
|
854
|
-
status = 0 # Assume success
|
|
855
|
-
error_msg = ""
|
|
856
|
-
new_project_name, new_referentiel_name = build_activity_libelle(
|
|
857
|
-
nsip_id,
|
|
858
|
-
master_project,
|
|
859
|
-
project,
|
|
860
|
-
)
|
|
861
|
-
|
|
862
|
-
try:
|
|
863
|
-
referentiel_entry = Referentiel.query.filter(Referentiel.id == hito_referentiel_id).first()
|
|
864
|
-
project_entry = Projet.query.filter(Projet.id == hito_project_id).first()
|
|
865
|
-
referentiel_entry.libelle = new_referentiel_name
|
|
866
|
-
project_entry.libelle = new_project_name
|
|
867
|
-
change_log_msg = f"Modifié le {datetime.now()}"
|
|
868
|
-
if project_entry.description:
|
|
869
|
-
project_entry.description += f"; {change_log_msg}"
|
|
870
|
-
else:
|
|
871
|
-
project_entry.description = change_log_msg
|
|
872
|
-
db.session.commit()
|
|
873
|
-
except Exception as e:
|
|
874
|
-
status = 1
|
|
875
|
-
error_msg = getattr(e, "message", repr(e))
|
|
876
|
-
db.session.rollback()
|
|
877
|
-
|
|
878
|
-
return status, error_msg
|
|
879
|
-
|
|
880
|
-
|
|
881
|
-
def add_activity(
|
|
882
|
-
nsip_id: str,
|
|
883
|
-
master_project: str,
|
|
884
|
-
project: str,
|
|
885
|
-
activity_teams: List[str],
|
|
886
|
-
project_activity: bool,
|
|
887
|
-
):
|
|
888
|
-
"""
|
|
889
|
-
Adds a new project in Hito referenciel and in Hito project/activity table
|
|
890
|
-
|
|
891
|
-
:param nsip_id: NSIP ID for the project
|
|
892
|
-
:param master_project: master project name
|
|
893
|
-
:param project: project name
|
|
894
|
-
:param activity_teams: list of team IDs associated with the project
|
|
895
|
-
:param project_activity: if True it is a NSIP project, else a NSIP activity
|
|
896
|
-
:return: 0 if update succeeded, non-zero if an error occured, error_msg if
|
|
897
|
-
an error occured
|
|
898
|
-
"""
|
|
899
|
-
|
|
900
|
-
from ositah.utils.hito_db_model import Projet, Referentiel, Team
|
|
901
|
-
|
|
902
|
-
db = get_db()
|
|
903
|
-
|
|
904
|
-
status = 0 # Assume success
|
|
905
|
-
error_msg = ""
|
|
906
|
-
project_name, referentiel_name = build_activity_libelle(
|
|
907
|
-
nsip_id,
|
|
908
|
-
master_project,
|
|
909
|
-
project,
|
|
910
|
-
)
|
|
911
|
-
|
|
912
|
-
if project_activity:
|
|
913
|
-
entry_class = "projetnsipreferentiel"
|
|
914
|
-
entry_order = NSIP_PROJECT_ORDER
|
|
915
|
-
else:
|
|
916
|
-
entry_class = "activitensipreferentiel"
|
|
917
|
-
entry_order = NSIP_ACIVITY_ORDER
|
|
918
|
-
|
|
919
|
-
try:
|
|
920
|
-
referentiel_entry = Referentiel(
|
|
921
|
-
libelle=referentiel_name,
|
|
922
|
-
object_class=entry_class,
|
|
923
|
-
ordre=entry_order,
|
|
924
|
-
)
|
|
925
|
-
activity_entry = Projet(
|
|
926
|
-
libelle=project_name,
|
|
927
|
-
description=f"Créé le {datetime.now()}",
|
|
928
|
-
ordre=entry_order,
|
|
929
|
-
)
|
|
930
|
-
if activity_teams:
|
|
931
|
-
activity_entry.teams = Team.query.filter(Team.id.in_(activity_teams)).all()
|
|
932
|
-
db.session.add(referentiel_entry)
|
|
933
|
-
db.session.add(activity_entry)
|
|
934
|
-
db.session.commit()
|
|
935
|
-
# Define relationship between activity and referentiel entry after creating them so that
|
|
936
|
-
# the referentiel ID generated by the DB server can be accessed
|
|
937
|
-
if project_activity:
|
|
938
|
-
activity_entry.projet_nsip_referentiel_id = referentiel_entry.id
|
|
939
|
-
else:
|
|
940
|
-
activity_entry.activite_nsip_referentiel_id = referentiel_entry.id
|
|
941
|
-
db.session.commit()
|
|
942
|
-
|
|
943
|
-
except Exception as e:
|
|
944
|
-
status = 1
|
|
945
|
-
error_msg = getattr(e, "message", repr(e))
|
|
946
|
-
db.session.rollback()
|
|
947
|
-
|
|
948
|
-
return status, error_msg
|
|
949
|
-
|
|
950
|
-
|
|
951
|
-
def remove_activity(
|
|
952
|
-
hito_project_id: str,
|
|
953
|
-
hito_referentiel_id: str,
|
|
954
|
-
nsip_id: str,
|
|
955
|
-
project_activity: bool,
|
|
956
|
-
):
|
|
957
|
-
"""
|
|
958
|
-
Remove the association between a Hito activity (project or reference) and NSIP. The Hito
|
|
959
|
-
activity is kept as it may be referenced by other objects but its description is updated
|
|
960
|
-
to mention that it is no longer in NSIP. The project name is updated so that it appears in the
|
|
961
|
-
pseudo-masterproject NSIP_DELETED_MASTERPROJECT. Associated teams are removed.
|
|
962
|
-
|
|
963
|
-
:param hito_project_id: Hito project ID
|
|
964
|
-
:param hito_referentiel_id: Hito referentiel ID for the project
|
|
965
|
-
:param nsip_id: NSIP ID for the project
|
|
966
|
-
:param project_activity: if True it is a NSIP project, else a NSIP activity
|
|
967
|
-
:return: 0 if update succeeded, non-zero if an error occured, error_msg if
|
|
968
|
-
an error occured
|
|
969
|
-
"""
|
|
970
|
-
|
|
971
|
-
from ositah.utils.hito_db_model import Projet, Referentiel
|
|
972
|
-
|
|
973
|
-
db = get_db()
|
|
974
|
-
|
|
975
|
-
status = 0 # Assume success
|
|
976
|
-
error_msg = ""
|
|
977
|
-
|
|
978
|
-
try:
|
|
979
|
-
referentiel_entry = Referentiel.query.filter(Referentiel.id == hito_referentiel_id).first()
|
|
980
|
-
db.session.query()
|
|
981
|
-
activity_entry = Projet.query.filter(Projet.id == hito_project_id).first()
|
|
982
|
-
if project_activity:
|
|
983
|
-
activity_entry.projet_nsip_referentiel_id = None
|
|
984
|
-
else:
|
|
985
|
-
activity_entry.activite_nsip_referentiel_id = None
|
|
986
|
-
change_log_msg = f"Desactivé le {datetime.now()} (NSIP ID={nsip_id})"
|
|
987
|
-
if activity_entry.description:
|
|
988
|
-
activity_entry.description += f"; {change_log_msg}"
|
|
989
|
-
else:
|
|
990
|
-
activity_entry.description = change_log_msg
|
|
991
|
-
activity_entry.libelle = f"{MASTERPROJECT_DELETED_ACTIVITY} / {activity_entry.libelle}"
|
|
992
|
-
activity_entry.ordre = DISABLED_ACTIVITY_ORDER
|
|
993
|
-
if len(activity_entry.teams) > 0:
|
|
994
|
-
activity_entry.teams.clear()
|
|
995
|
-
db.session.delete(referentiel_entry)
|
|
996
|
-
db.session.commit()
|
|
997
|
-
except Exception as e:
|
|
998
|
-
status = 1
|
|
999
|
-
error_msg = getattr(e, "message", repr(e))
|
|
1000
|
-
db.session.rollback()
|
|
1001
|
-
|
|
1002
|
-
return status, error_msg
|
|
1003
|
-
|
|
1004
|
-
|
|
1005
|
-
def add_activity_teams(
|
|
1006
|
-
masterproject: str, project: str, team_list: List[str], project_activity: bool
|
|
1007
|
-
):
|
|
1008
|
-
"""
|
|
1009
|
-
Add teams to an activity.
|
|
1010
|
-
|
|
1011
|
-
:param masterproject: activity masterproject name
|
|
1012
|
-
:param project: activity project name
|
|
1013
|
-
:param team_list: list of team names to add
|
|
1014
|
-
:param project_activity: if true, an Hito project else an Hito activity
|
|
1015
|
-
:return: status (0 if success), error_msg (empty if success)
|
|
1016
|
-
"""
|
|
1017
|
-
|
|
1018
|
-
from ositah.utils.hito_db_model import Activite, Projet, Team
|
|
1019
|
-
|
|
1020
|
-
db = get_db()
|
|
1021
|
-
|
|
1022
|
-
status = 0 # Assume success
|
|
1023
|
-
error_msg = ""
|
|
1024
|
-
|
|
1025
|
-
if project_activity:
|
|
1026
|
-
Activity = Projet
|
|
1027
|
-
else:
|
|
1028
|
-
Activity = Activite
|
|
1029
|
-
|
|
1030
|
-
activity_name = ositah2hito_project_name(masterproject, project)
|
|
1031
|
-
activity = Activity.query.filter(Activity.libelle == activity_name).first()
|
|
1032
|
-
|
|
1033
|
-
if activity:
|
|
1034
|
-
try:
|
|
1035
|
-
for team in team_list:
|
|
1036
|
-
team_object = Team.query.filter(Team.nom == team).first()
|
|
1037
|
-
activity.teams.append(team_object)
|
|
1038
|
-
db.session.commit()
|
|
1039
|
-
except Exception as e:
|
|
1040
|
-
status = 1
|
|
1041
|
-
error_msg = getattr(e, "message", repr(e))
|
|
1042
|
-
db.session.rollback()
|
|
1043
|
-
|
|
1044
|
-
return status, error_msg
|
|
1045
|
-
|
|
1046
|
-
|
|
1047
|
-
def remove_activity_teams(
|
|
1048
|
-
masterproject: str, project: str, team_list: List[str], project_activity: bool
|
|
1049
|
-
):
|
|
1050
|
-
"""
|
|
1051
|
-
Remove teams from an activity. If the team is not present in teams list, silently
|
|
1052
|
-
ignore it.
|
|
1053
|
-
|
|
1054
|
-
:param masterproject: activity masterproject name
|
|
1055
|
-
:param project: activity project name
|
|
1056
|
-
:param team_list: list of team names to remove
|
|
1057
|
-
:param project_activity: if true, an Hito project else an Hito activity
|
|
1058
|
-
:return: status (0 if success), error_msg (empty if success)
|
|
1059
|
-
"""
|
|
1060
|
-
|
|
1061
|
-
from ositah.utils.hito_db_model import Activite, Projet, Team
|
|
1062
|
-
|
|
1063
|
-
db = get_db()
|
|
1064
|
-
|
|
1065
|
-
status = 0 # Assume success
|
|
1066
|
-
error_msg = ""
|
|
1067
|
-
|
|
1068
|
-
if project_activity:
|
|
1069
|
-
Activity = Projet
|
|
1070
|
-
else:
|
|
1071
|
-
Activity = Activite
|
|
1072
|
-
|
|
1073
|
-
activity_name = ositah2hito_project_name(masterproject, project)
|
|
1074
|
-
activity = Activity.query.filter(Activity.libelle == activity_name).first()
|
|
1075
|
-
|
|
1076
|
-
if activity:
|
|
1077
|
-
try:
|
|
1078
|
-
for team in team_list:
|
|
1079
|
-
team_object = Team.query.filter(Team.nom == team).first()
|
|
1080
|
-
if team_object in activity.teams:
|
|
1081
|
-
activity.teams.remove(team_object)
|
|
1082
|
-
db.session.commit()
|
|
1083
|
-
except Exception as e:
|
|
1084
|
-
status = 1
|
|
1085
|
-
error_msg = getattr(e, "message", repr(e))
|
|
1086
|
-
db.session.rollback()
|
|
1087
|
-
|
|
1088
|
-
return status, error_msg
|
|
1089
|
-
|
|
1090
|
-
|
|
1091
|
-
def reenable_activity(activity_name: str, project_activity: bool, name_prefix: str = None):
|
|
1092
|
-
"""
|
|
1093
|
-
Reenable a disabled activity. This involves:
|
|
1094
|
-
- Updating master project to match the original one
|
|
1095
|
-
- If it was an NSIP project, recreate the referentiel entry
|
|
1096
|
-
|
|
1097
|
-
:param activity_name: activity name
|
|
1098
|
-
:param project_activity: if true, an Hito project else an Hito activity
|
|
1099
|
-
:param name_prefix: activity name prefix for deleted or local activities
|
|
1100
|
-
:return: status and error message if any
|
|
1101
|
-
"""
|
|
1102
|
-
|
|
1103
|
-
from ositah.utils.hito_db_model import Activite, Projet, Referentiel
|
|
1104
|
-
|
|
1105
|
-
db = get_db()
|
|
1106
|
-
|
|
1107
|
-
status = 0 # Assume success
|
|
1108
|
-
error_msg = ""
|
|
1109
|
-
|
|
1110
|
-
if project_activity:
|
|
1111
|
-
Activity = Projet
|
|
1112
|
-
else:
|
|
1113
|
-
Activity = Activite
|
|
1114
|
-
|
|
1115
|
-
# Retrieve activity attributes and NSIP ID if present in description
|
|
1116
|
-
if name_prefix:
|
|
1117
|
-
activity_full_name = f"{name_prefix} / {activity_name}"
|
|
1118
|
-
else:
|
|
1119
|
-
activity_full_name = activity_name
|
|
1120
|
-
activity_entry = Activity.query.filter(Activity.libelle == activity_full_name).first()
|
|
1121
|
-
|
|
1122
|
-
m = re.search(r"\(NSIP ID\=(?P<id>\d+)\)$", activity_entry.description)
|
|
1123
|
-
if m:
|
|
1124
|
-
nsip_id = m.group("id")
|
|
1125
|
-
else:
|
|
1126
|
-
nsip_id = None
|
|
1127
|
-
|
|
1128
|
-
# Check if an entry exist in the referentiel for the NSIP ID: if not, create it
|
|
1129
|
-
nsip_entry = Referentiel.query.filter(
|
|
1130
|
-
Referentiel.libelle.ilike(f"%(NSIP ID = {nsip_id})")
|
|
1131
|
-
).first()
|
|
1132
|
-
if not nsip_entry:
|
|
1133
|
-
master_project, activity = hito2ositah_project_name(activity_name)
|
|
1134
|
-
project_name, referentiel_name = build_activity_libelle(
|
|
1135
|
-
nsip_id,
|
|
1136
|
-
master_project,
|
|
1137
|
-
activity,
|
|
1138
|
-
)
|
|
1139
|
-
|
|
1140
|
-
if project_activity:
|
|
1141
|
-
entry_class = "projetnsipreferentiel"
|
|
1142
|
-
entry_order = NSIP_PROJECT_ORDER
|
|
1143
|
-
else:
|
|
1144
|
-
entry_class = "activitensipreferentiel"
|
|
1145
|
-
entry_order = NSIP_ACIVITY_ORDER
|
|
1146
|
-
|
|
1147
|
-
referentiel_entry = Referentiel(
|
|
1148
|
-
libelle=referentiel_name,
|
|
1149
|
-
object_class=entry_class,
|
|
1150
|
-
ordre=entry_order,
|
|
1151
|
-
)
|
|
1152
|
-
else:
|
|
1153
|
-
referentiel_entry = None
|
|
1154
|
-
|
|
1155
|
-
# Create referentiel entry if necessary and update activity
|
|
1156
|
-
try:
|
|
1157
|
-
if referentiel_entry:
|
|
1158
|
-
db.session.add(referentiel_entry)
|
|
1159
|
-
activity_entry.libelle = activity_name
|
|
1160
|
-
activity_entry.description = f"Modifié le {datetime.now()}"
|
|
1161
|
-
db.session.commit()
|
|
1162
|
-
# Define relationship between activity and referentiel entry after creating them so that
|
|
1163
|
-
# the referentiel ID generated by the DB server can be accessed
|
|
1164
|
-
if project_activity:
|
|
1165
|
-
activity_entry.projet_nsip_referentiel_id = referentiel_entry.id
|
|
1166
|
-
else:
|
|
1167
|
-
activity_entry.activite_nsip_referentiel_id = referentiel_entry.id
|
|
1168
|
-
db.session.commit()
|
|
1169
|
-
|
|
1170
|
-
except Exception as e:
|
|
1171
|
-
status = 1
|
|
1172
|
-
error_msg = getattr(e, "message", repr(e))
|
|
1173
|
-
db.session.rollback()
|
|
1174
|
-
|
|
1175
|
-
# Clear cached data to force a refresh of project list
|
|
1176
|
-
clear_cached_data()
|
|
1177
|
-
|
|
1178
|
-
return status, error_msg
|
|
1
|
+
# Helper functions related to projects and time declarations
|
|
2
|
+
|
|
3
|
+
import re
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
from typing import List, Tuple
|
|
6
|
+
|
|
7
|
+
import numpy as np
|
|
8
|
+
import pandas as pd
|
|
9
|
+
from sqlalchemy.orm import joinedload
|
|
10
|
+
|
|
11
|
+
from ositah.utils.agents import get_agents
|
|
12
|
+
from ositah.utils.cache import clear_cached_data
|
|
13
|
+
from ositah.utils.exceptions import InvalidDataSource, InvalidHitoProjectName
|
|
14
|
+
from ositah.utils.hito_db import get_db
|
|
15
|
+
from ositah.utils.period import get_validation_period_data
|
|
16
|
+
from ositah.utils.utils import (
|
|
17
|
+
DAY_HOURS,
|
|
18
|
+
TEAM_LIST_ALL_AGENTS,
|
|
19
|
+
TIME_UNIT_HOURS,
|
|
20
|
+
TIME_UNIT_HOURS_EN,
|
|
21
|
+
TIME_UNIT_HOURS_FR,
|
|
22
|
+
TIME_UNIT_WEEKS,
|
|
23
|
+
TIME_UNIT_WEEKS_EN,
|
|
24
|
+
TIME_UNIT_WEEKS_FR,
|
|
25
|
+
WEEK_HOURS,
|
|
26
|
+
GlobalParams,
|
|
27
|
+
)
|
|
28
|
+
|
|
29
|
+
CATEGORY_DEFAULT = "nsip_project"
|
|
30
|
+
|
|
31
|
+
DATA_SOURCE_HITO = "hito"
|
|
32
|
+
DATA_SOURCE_OSITAH = "ositah"
|
|
33
|
+
|
|
34
|
+
NSIP_CLASS_OTHER_ACTIVITY = "activitensipreferentiel"
|
|
35
|
+
NSIP_CLASS_PROJECT = "projetnsipreferentiel"
|
|
36
|
+
|
|
37
|
+
MASTERPROJECT_DELETED_ACTIVITY = "Disabled"
|
|
38
|
+
MASTERPROJECT_LOCAL_PROJECT = "Local Projects"
|
|
39
|
+
|
|
40
|
+
NSIP_PROJECT_ORDER = 1
|
|
41
|
+
LOCAL_PROJECT_ORDER = 2
|
|
42
|
+
NSIP_ACIVITY_ORDER = 3
|
|
43
|
+
DISABLED_ACTIVITY_ORDER = 9999
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def hito2ositah_project_name(hito_name):
|
|
47
|
+
"""
|
|
48
|
+
Split a Hito project name into a masterprojet and project name
|
|
49
|
+
|
|
50
|
+
:param hito_name: Hito name with masterprojet and project name separated by a /
|
|
51
|
+
:return: masterprojet and project name
|
|
52
|
+
"""
|
|
53
|
+
masterproject, project_name = hito_name.split(" / ", 2)
|
|
54
|
+
return masterproject, project_name
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def ositah2hito_project_name(masterproject, project):
|
|
58
|
+
"""
|
|
59
|
+
Build the Hito/NSIP project name from the masterproject and project
|
|
60
|
+
|
|
61
|
+
:param masterproject: masterproject name
|
|
62
|
+
:param project: project name
|
|
63
|
+
:return: Hito/NSIP project fullname
|
|
64
|
+
"""
|
|
65
|
+
return " / ".join([masterproject, project])
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
def nsip2ositah_project_name(masterproject, project):
|
|
69
|
+
"""
|
|
70
|
+
Build the OSITAH project name from the NSIP project name, removing the master project
|
|
71
|
+
name if it is at the head of the NSIP project name, except if the master project name
|
|
72
|
+
and the project name are identical.
|
|
73
|
+
|
|
74
|
+
:param masterproject: masterproject name
|
|
75
|
+
:param project: project name
|
|
76
|
+
:return: OSITAH project name (without the masterproject name)
|
|
77
|
+
"""
|
|
78
|
+
|
|
79
|
+
if project != masterproject:
|
|
80
|
+
m = re.match(rf"{masterproject}\s+\-\s+(?P<project>.*)", project)
|
|
81
|
+
if m:
|
|
82
|
+
project = m.group("project")
|
|
83
|
+
|
|
84
|
+
return project
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
def category_from_activity(category_patterns, activity) -> str:
|
|
88
|
+
"""
|
|
89
|
+
Return the activity category if the activity matches the pattern. Else an empty string.
|
|
90
|
+
Called as a lambda to build the category column.
|
|
91
|
+
|
|
92
|
+
:param category_patterns: category patterns to match against the activity (dict where
|
|
93
|
+
the key is the pattern and the value is the category)
|
|
94
|
+
:param activity: activity name
|
|
95
|
+
:return: category or np.Nan
|
|
96
|
+
"""
|
|
97
|
+
|
|
98
|
+
for pattern, category in category_patterns.items():
|
|
99
|
+
if re.match(pattern.lower(), activity.lower()):
|
|
100
|
+
return category
|
|
101
|
+
|
|
102
|
+
return np.NaN
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
def activity_from_project(project):
|
|
106
|
+
"""
|
|
107
|
+
Return the activity the project belongs to.
|
|
108
|
+
|
|
109
|
+
:param project: project name
|
|
110
|
+
:return: activity name
|
|
111
|
+
"""
|
|
112
|
+
|
|
113
|
+
global_params = GlobalParams()
|
|
114
|
+
|
|
115
|
+
for activity, pattern in global_params.project_categories.items():
|
|
116
|
+
if re.match(pattern, project):
|
|
117
|
+
return activity
|
|
118
|
+
|
|
119
|
+
return CATEGORY_DEFAULT
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
def reference_masterproject(reference_type):
    """
    Return an OSITAH masterproject for a NSIP reference, based on its type.
    The masterproject for each type is defined in the configuration. A reference type
    without a match or with an empty value is ignored (np.NaN returned).

    :param reference_type: NSIP reference type
    :return: matching master project
    """

    global_params = GlobalParams()

    for type_pattern, masterproject in global_params.reference_masterprojects.items():
        if re.match(type_pattern.lower(), reference_type.lower()):
            if len(masterproject) > 0:
                return masterproject
            else:
                return np.NaN

    return np.NaN

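# Illustrative sketch (hypothetical configuration): reference_masterprojects maps a
# reference-type pattern to the masterproject the reference should be grouped under;
# an empty value means the reference type is ignored.
#
#   global_params.reference_masterprojects = {"service.*": "Services", "formation.*": ""}
#   reference_masterproject("Service informatique")   # -> "Services"
#   reference_masterproject("Formation")              # -> np.NaN (empty value: ignored)
#   reference_masterproject("Autre")                  # -> np.NaN (no pattern matched)
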
def time_unit(category, short=False, english=True, parenthesis=False) -> str:
    """
    Return the time unit as defined in the configuration as a string. If the category/column is
    not in the configuration, return an empty string.

    :param category: project category/class
    :param short: if true, return abbreviated unit names
    :param english: return english unit names if true. Also implies short=False
    :param parenthesis: if True, enclose the string in ()
    :return: time unit for the category as a string
    """

    global_params = GlobalParams()

    if english:
        unit_w = TIME_UNIT_WEEKS_EN
        unit_h = TIME_UNIT_HOURS_EN
    else:
        if short:
            unit_w = "sem."
            unit_h = "h"
        else:
            unit_w = TIME_UNIT_WEEKS_FR
            unit_h = TIME_UNIT_HOURS_FR

    if category in global_params.time_unit:
        if global_params.time_unit[category] == TIME_UNIT_WEEKS:
            unit_str = unit_w
        elif global_params.time_unit[category] == TIME_UNIT_HOURS:
            unit_str = unit_h
        else:
            raise Exception(
                (
                    f"Unsupported time unit '{global_params.time_unit[category]}'"
                    f" for category {category}"
                )
            )
    else:
        return ""

    if parenthesis:
        return f"({unit_str})"
    else:
        return unit_str

def category_time_and_unit(category, hours, short=True, english=False) -> Tuple[int, str]:
    """
    Return the rounded category time in the appropriate unit and the category time unit

    :param category: project category/class
    :param hours: number of hours
    :param short: if true, return abbreviated unit names
    :param english: return english unit names if true. Also implies short=False
    :return: project time, project unit
    """

    global_params = GlobalParams()

    unit = time_unit(category, short, english)

    if global_params.time_unit[category] == "w":
        declared_time = f"{int(round(hours / WEEK_HOURS))}"
    else:
        declared_time = f"{int(round(hours))}"

    return declared_time, unit

def project_time(project, hours):
    """
    Return the rounded project time in the appropriate unit and the project time unit

    :param project: project name
    :param hours: number of hours
    :return: project time, abbreviated project unit
    """

    return category_time_and_unit(activity_from_project(project), hours)

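# Illustrative sketch (assumptions: the "nsip_project" category is configured with the weeks
# unit and WEEK_HOURS is 38.5; both values are hypothetical): category_time_and_unit() only
# converts hours to weeks when the configured unit for the category is the weeks unit,
# otherwise the rounded hours are returned with the hours unit.
#
#   category_time_and_unit("nsip_project", 77)                  # -> ("2", "sem.")
#   category_time_and_unit("nsip_project", 77, english=True)    # -> ("2", TIME_UNIT_WEEKS_EN)
#   project_time("LSST / Camera", 77)   # same conversion, category derived from the project name
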
def get_team_projects(
    team,
    team_selection_date,
    period_date: datetime,
    source=DATA_SOURCE_HITO,
    use_cache: bool = True,
):
    """
    Query the Hito database and return a dataframe with all the project contributions for a given
    team. The dataframe has one row for each agent contribution to each project.

    :param team: selected team or TEAM_LIST_ALL_AGENTS for all teams
    :param team_selection_date: last time the team selection was changed
    :param period_date: a date that must be inside the declaration period
    :param source: whether to use Hito (non validated) or OSITAH (validated) as a data source
    :param use_cache: if true, use the cache if defined and up-to-date or update it with the
                      new declarations
    :return: dataframe or None if the query returned no entry
    """

    from ositah.utils.hito_db_model import (
        ActiviteDetail,
        Agent,
        OSITAHProjectDeclaration,
        OSITAHValidation,
        Projet,
        Team,
    )

    global_params = GlobalParams()
    columns = global_params.columns
    session_data = global_params.session_data
    db = get_db()

    validation_period = get_validation_period_data(period_date)

    # Check if there is a cached version
    if session_data.project_declarations is not None and use_cache:
        if (
            session_data.project_declarations_source is None
            or source != session_data.project_declarations_source
            or datetime.fromisoformat(team_selection_date) > session_data.cache_date
        ):
            # The cache must be refreshed if the selected source doesn't match the cached one or
            # if the team has been modified since the cache was loaded (required for multi-worker
            # configurations, as the team selection does not necessarily happen on the same worker
            # as the later processing). When a multi-worker configuration is used, it may also
            # happen that the declaration source is not defined if it was initially initialised
            # on another worker.
            clear_cached_data()
        else:
            return session_data.project_declarations

    if source == DATA_SOURCE_OSITAH:
        # The query relies on the fact that only one validation entry can be in the validated
        # state for a given period, something enforced by the declaration validation.
        # When a team is specified, display all projects from this team and the children teams
        query = (
            OSITAHProjectDeclaration.query.join(
                OSITAHValidation,
                OSITAHProjectDeclaration.validation_id == OSITAHValidation.id,
            )
            .join(Agent, Agent.id == OSITAHValidation.agent_id)
            .join(Team, Team.id == Agent.team_id)
            .add_entity(Agent)
            .add_entity(Team)
            .add_entity(OSITAHValidation)
            .filter(OSITAHValidation.validated)
            .filter(OSITAHValidation.period_id == validation_period.id)
        )
        if team != TEAM_LIST_ALL_AGENTS:
            query = query.filter(Team.nom.ilike(f"{team}%"))
        declarations = pd.read_sql(query.statement, db.session.bind)
        if len(declarations) == 0:
            return None
        declarations.rename(columns={"id": columns["activity_id"]}, inplace=True)
        declarations.rename(columns={"id_1": columns["agent_id"]}, inplace=True)
        declarations.rename(columns={"nom_1": columns["team"]}, inplace=True)
        declarations.rename(columns={"hours": columns["hours"]}, inplace=True)
        declarations.rename(columns={"id_3": "validation_id"}, inplace=True)
        declarations.rename(columns={"timestamp": "validation_time"}, inplace=True)
        # Drop statut column to avoid conflicts in future merge with the Agent table
        declarations.drop(columns=["statut"], inplace=True)
        # Ensure that email_auth is defined and if it is not, replace it by the email.
        declarations.loc[declarations[columns["email_auth"]].isna(), columns["email_auth"]] = (
            declarations[columns["email"]]
        )
        declarations[columns["activity"]] = declarations.apply(
            lambda row: ositah2hito_project_name(
                row[columns["masterproject"]], row[columns["project"]]
            ),
            axis=1,
        )

    elif source == DATA_SOURCE_HITO:
        # For team names, we want to keep the agent team name instead of the team_name in
        # activity_details, so it must be specified explicitly in the join with the Team table
        query = (
            ActiviteDetail.query.join(Projet)
            .join(Agent)
            .join(Team, Team.id == Agent.team_id)
            .add_entity(Projet)
            .add_entity(Agent)
            .add_entity(Team)
            .filter(
                ActiviteDetail.date >= validation_period.start_date,
                ActiviteDetail.date <= validation_period.end_date,
            )
        )
        if team != TEAM_LIST_ALL_AGENTS:
            query = query.filter(Team.nom.ilike(f"{team}%"))
        daily_declarations = pd.read_sql(query.statement, db.session.bind)
        if len(daily_declarations) == 0:
            return None
        # Pandas adds a suffix to duplicate column names, the first one being unchanged, the
        # second one being suffixed _1...
        daily_declarations.drop(columns=["id", "id_1", "id_2"], inplace=True)
        daily_declarations.rename(columns={"agent_id": columns["agent_id"]}, inplace=True)
        daily_declarations.rename(columns={"libelle": columns["activity"]}, inplace=True)
        daily_declarations.rename(columns={"nom_1": columns["team"]}, inplace=True)
        for column in [columns["hours"], columns["percent"]]:
            daily_declarations[column] = daily_declarations[column].astype(float)
        # Ensure that email_auth is defined and, if it is not, replace it by the email. If left
        # undefined, the entries will not be present in the pivot table as it is part of the index.
        daily_declarations.loc[
            daily_declarations[columns["email_auth"]].isna(), columns["email_auth"]
        ] = daily_declarations[columns["email"]]
        # Rebuild the agent quotite by comparing the time declared with the percent computed by
        # Hito based on the quotite
        daily_declarations[columns["quotite"]] = (
            daily_declarations[columns["hours"]] / DAY_HOURS * 100
        ) / daily_declarations[columns["percent"]]
        global_declarations_pt = pd.pivot_table(
            daily_declarations,
            index=[
                columns["lastname"],
                columns["firstname"],
                columns["activity"],
                columns["activity_id"],
                columns["team"],
                columns["agent_id"],
                columns["email_auth"],
                columns["email"],
            ],
            values=[columns["hours"], columns["quotite"]],
            aggfunc={columns["hours"]: "sum", columns["quotite"]: "mean"},
        )
        declarations = pd.DataFrame(global_declarations_pt.to_records())
        declarations[[columns["masterproject"], columns["project"]]] = declarations[
            columns["activity"]
        ].str.split(" / ", n=1, expand=True)
        # An entry in the pseudo master project MASTERPROJECT_DELETED_ACTIVITY is a special
        # case corresponding to deleted NSIP projects: the real name is in the project part, which
        # must be parsed as for any other project
        declarations["project_saved"] = np.NaN
        declarations["project_saved"] = declarations["project_saved"].astype("object")
        declarations.loc[
            declarations[columns["masterproject"]] == MASTERPROJECT_DELETED_ACTIVITY,
            "project_saved",
        ] = declarations[columns["project"]]
        # Not sure why the following line doesn't work (masterproject and project set to NaN
        # if no row matches the indexing condition)... An issue has been opened:
        # https://github.com/pandas-dev/pandas/issues/44726.
        # declarations.loc[
        #     declarations.project_saved.notna(),
        #     [columns["masterproject"], columns["project"]],
        # ] = declarations.project_saved.str.split(" / ", n=1, expand=True)
        #
        # The following workaround fails if project_saved contains only np.NaN. It is a known
        # issue in pandas 1.3.4, see https://github.com/pandas-dev/pandas/issues/35807
        # declarations[
        #     ["newmaster", "newproject"]
        # ] = declarations.project_saved.str.split(" / ", n=1, expand=True)
        #
        # Workaround based on
        # https://github.com/pandas-dev/pandas/issues/35807#issuecomment-676912441. If no row
        # matches the condition, only one column is created, thus the need to check they all
        # exist.
        tmp_columns = ["newmaster", "newproject"]
        saved_projects = (
            declarations["project_saved"]
            .str.split("/", expand=True, n=len(tmp_columns) - 1)
            .rename(columns={k: name for k, name in enumerate(tmp_columns)})
        )
        for column in tmp_columns:
            if column not in saved_projects.columns:
                saved_projects[column] = np.NaN
        declarations = declarations.join(saved_projects)
        declarations.loc[declarations.project_saved.notna(), columns["masterproject"]] = (
            declarations.newmaster
        )
        declarations.loc[declarations.project_saved.notna(), columns["project"]] = (
            declarations.newproject
        )
        declarations.drop(columns=["newmaster", "newproject"], inplace=True)
        declarations.loc[declarations.project_saved.notna(), columns["activity"]] = (
            declarations.project_saved
        )

        # Detect project names not matching the format "masterproject / project"
        invalid_hito_projects = declarations.loc[declarations[columns["project"]].isnull()]
        if not invalid_hito_projects.empty:
            raise InvalidHitoProjectName(
                pd.Series(invalid_hito_projects[columns["masterproject"]]).unique()
            )

    else:
        raise InvalidDataSource(source)

    declarations[columns["fullname"]] = declarations[
        [columns["lastname"], columns["firstname"]]
    ].agg(" ".join, axis=1)
    declarations[columns["category"]] = declarations.apply(
        lambda row: category_from_activity(
            global_params.category_patterns, row[columns["activity"]]
        ),
        axis=1,
    )
    declarations.loc[declarations[columns["category"]].isna(), "category"] = CATEGORY_DEFAULT

    # Check quotite < 50% and flag the entry as suspect (generally means confusion between quotite
    # and percent during declaration)
    declarations["suspect"] = declarations[columns["quotite"]] < 0.5

    if use_cache:
        session_data.set_project_declarations(declarations, source)

    return declarations

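# Illustrative sketch (standalone, hypothetical data): the str.split workaround used in
# get_team_projects(). When no row has a non-null value, str.split(expand=True) yields a single
# all-NaN column, so the expected columns have to be created explicitly before assigning them back.
#
#   import numpy as np
#   import pandas as pd
#
#   df = pd.DataFrame({"project_saved": [np.nan, "LSST / Camera"]})
#   tmp_columns = ["newmaster", "newproject"]
#   parts = (
#       df["project_saved"]
#       .str.split("/", expand=True, n=len(tmp_columns) - 1)
#       .rename(columns=dict(enumerate(tmp_columns)))
#   )
#   for column in tmp_columns:
#       if column not in parts.columns:
#           parts[column] = np.nan
#   # parts now always has both columns, even if "project_saved" is entirely NaN
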
def get_all_hito_activities(project_activity: bool):
    """
    Retrieve all projects or all activities defined in Hito with their associated teams

    :param project_activity: if true, return all projects, else all Hito activities
    :return: dataframe
    """

    from ositah.utils.hito_db_model import Activite, Projet

    global_params = GlobalParams()
    session_data = global_params.session_data
    db = get_db()

    # Check if there is a cached version
    if session_data.get_hito_activities(project_activity) is not None:
        return session_data.get_hito_activities(project_activity)

    else:
        if project_activity:
            Activity = Projet
        else:
            Activity = Activite

        query = Activity.query.options(joinedload(Activity.teams))
        activities = pd.read_sql(query.statement, db.session.bind)
        activities[["masterproject", "project"]] = activities.libelle.str.split(
            " / ", n=1, expand=True
        )
        activities.rename(columns={"description_1": "team_description"}, inplace=True)
        activities.rename(columns={"nom": "team_name"}, inplace=True)

        session_data.set_hito_activities(activities, project_activity)

        return activities

def build_projects_data(team, team_selection_date, period_date: str, source):
    """
    Build the project list contributed by the selected team and return it as a dataframe

    :param team: selected team
    :param team_selection_date: last time the team selection was changed
    :param period_date: a date that must be inside the declaration period
    :param source: whether to use Hito (non validated) or OSITAH (validated) as a data source
    :return: dataframe with projects data, dataframe with agent declarations
    """

    global_params = GlobalParams()
    columns = global_params.columns
    session_data = global_params.session_data

    declaration_list = get_team_projects(team, team_selection_date, period_date, source)
    if declaration_list is None:
        return None, None

    projects_data = session_data.projects_data
    if projects_data is None:
        projects_data_pt = pd.pivot_table(
            declaration_list,
            index=[
                columns["masterproject"],
                columns["project"],
                columns["activity"],
                columns["category"],
            ],
            values=[columns["hours"]],
            aggfunc={columns["hours"]: "sum"},
        )
        projects_data = pd.DataFrame(projects_data_pt.to_records())
        projects_data[columns["hours"]] = np.round(projects_data[columns["hours"]]).astype("int")
        projects_data[columns["weeks"]] = np.round(projects_data[columns["hours"]] / WEEK_HOURS, 1)
        short_name_len = 25
        projects_data["project_short"] = projects_data[columns["project"]]
        projects_data.loc[
            projects_data["project_short"].str.len() > short_name_len, "project_short"
        ] = projects_data["project_short"].str.slice_replace(start=short_name_len - 4, repl="...")
        session_data.projects_data = projects_data

    return projects_data, declaration_list

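# Illustrative sketch (standalone, hypothetical names): the project_short truncation above keeps
# names up to 25 characters and replaces the tail of longer ones with "...".
#
#   import pandas as pd
#
#   s = pd.Series(["Short name", "A very long project name that will not fit"])
#   short_name_len = 25
#   truncated = s.where(
#       s.str.len() <= short_name_len,
#       s.str.slice_replace(start=short_name_len - 4, repl="..."),
#   )
#   # -> ["Short name", "A very long project n..."]
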
def get_hito_nsip_activities(project_activity: bool = True):
    """
    Return a dataframe with all the NSIP activities defined in Hito. Activities can be
    either projects or "references" (other activities). An activity is considered as a
    NSIP activity if it has a matching entry in the Hito referentiel.

    :param project_activity: if true, return projects, else references
    :return: dataframe
    """

    from ositah.utils.hito_db_model import Projet, Referentiel

    db = get_db()

    if project_activity:
        project_join_id = Projet.projet_nsip_referentiel_id
        referentiel_class = "projetnsipreferentiel"
    else:
        project_join_id = Projet.activite_nsip_referentiel_id
        referentiel_class = "activitensipreferentiel"

    activity_query = (
        Projet.query.join(Referentiel, Referentiel.id == project_join_id)
        .add_entity(Referentiel)
        .filter(
            Referentiel.object_class == referentiel_class,
        )
    )

    activities = pd.read_sql(activity_query.statement, db.session.bind)

    activities.drop(
        columns=[
            "ordre",
            "projet_nsip_referentiel_id",
            "activite_nsip_referentiel_id",
        ],
        inplace=True,
    )
    activities.rename(columns={"id_1": "referentiel_id"}, inplace=True)
    activities.rename(columns={"libelle_1": "nsip_name_id"}, inplace=True)

    if activities.empty:
        activities[["nsip_master", "nsip_project", "nsip_project_id", "nsip_reference_id"]] = [
            np.NaN,
            np.NaN,
            np.NaN,
            np.NaN,
        ]
    else:
        activities[["nsip_master", "nsip_project", "nsip_project_id", "nsip_reference_id"]] = (
            activities.apply(
                lambda v: nsip_activity_name_id(v["nsip_name_id"], v["class"]),
                axis=1,
                result_type="expand",
            )
        )
        activities["nsip_project_id"] = activities["nsip_project_id"].astype(int)
        activities["nsip_reference_id"] = activities["nsip_reference_id"].astype(int)

    return activities

def get_hito_projects():
    """
    Return a dataframe with the information about all projects with validated declarations
    in the current declaration period defined in Hito and their relationship to NSIP, if relevant.

    :return: Hito project dataframe
    """

    from ositah.utils.hito_db_model import (
        OSITAHProjectDeclaration,
        OSITAHValidation,
        Projet,
        Referentiel,
    )

    db = get_db()

    projects_query = (
        Projet.query.join(Referentiel, Referentiel.id == Projet.projet_nsip_referentiel_id)
        .join(OSITAHProjectDeclaration)
        .join(OSITAHValidation)
        .add_entity(Referentiel)
        .add_entity(OSITAHValidation)
        .filter(
            Referentiel.object_class == "projetnsipreferentiel",
            OSITAHValidation.validated,
        )
    )
    projects = pd.read_sql(projects_query.statement, db.session.bind)

    activities_query = (
        Projet.query.join(Referentiel, Referentiel.id == Projet.activite_nsip_referentiel_id)
        .join(OSITAHProjectDeclaration)
        .join(OSITAHValidation)
        .add_entity(Referentiel)
        .add_entity(OSITAHValidation)
        .filter(
            Referentiel.object_class == "activitensipreferentiel",
            OSITAHValidation.validated,
        )
    )
    activities = pd.read_sql(activities_query.statement, db.session.bind)

    projects_activities = pd.concat([projects, activities], ignore_index=True)

    projects_activities.drop(
        columns=[
            "id_2",
            "ordre",
            "projet_nsip_referentiel_id",
            "activite_nsip_referentiel_id",
            "agent_id",
        ],
        inplace=True,
    )
    projects_activities.rename(columns={"id_1": "referentiel_id"}, inplace=True)
    projects_activities.rename(columns={"libelle_1": "nsip_name_id"}, inplace=True)
    projects_activities.drop_duplicates(subset=["id"], inplace=True)

    if projects_activities.empty:
        projects_activities[
            ["nsip_master", "nsip_project", "nsip_project_id", "nsip_reference_id"]
        ] = [np.NaN, np.NaN, np.NaN, np.NaN]
    else:
        projects_activities[
            ["nsip_master", "nsip_project", "nsip_project_id", "nsip_reference_id"]
        ] = projects_activities.apply(
            lambda v: nsip_activity_name_id(v["nsip_name_id"], v["class"]),
            axis=1,
            result_type="expand",
        )
        projects_activities["nsip_project_id"] = projects_activities["nsip_project_id"].astype(int)
        projects_activities["nsip_reference_id"] = projects_activities["nsip_reference_id"].astype(
            int
        )

    return projects_activities

def nsip_activity_name_id(hito_name: str, type: str) -> List[str]:
    """
    Split the NSIP activity project name in the Hito referentiel into 3 parts (masterproject name,
    project name, project ID) and return the ID as the project ID (3rd value) or the reference
    ID (4th value) depending on the activity type. The unused ID is set to 0 rather than np.NaN
    or pd.NA as the column may be used in merges.

    :param hito_name: activity name in Hito referentiel
    :param type: referentiel class
    :return: masterproject name, project name, project ID, reference ID
    """

    m = re.match(
        r"(?P<master>.*?)\s+/\s+(?P<project>.*)\s+\(NSIP ID:\s*(?P<id>\w+)\)$",
        hito_name,
    )
    if m:
        try:
            _ = int(m.group("id"))
        except ValueError:
            print(
                (
                    f"ERROR: invalid NSIP ID in Hito referentiel for '{m.group('master')} /"
                    f" {m.group('project')}' (ID={m.group('id')})"
                )
            )
            return m.group("master"), m.group("project"), 0, 0
        if type == NSIP_CLASS_PROJECT:
            project_id = m.group("id")
            reference_id = 0
        else:
            project_id = 0
            reference_id = m.group("id")
        return m.group("master"), m.group("project"), project_id, reference_id
    else:
        print(
            (
                f"ERROR: invalid Hito referentiel entry format, cannot be parsed as"
                f" master/project/id ({hito_name})"
            )
        )
        return np.NaN, np.NaN, 0, 0

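# Illustrative sketch (hypothetical referentiel entries): how nsip_activity_name_id() parses a
# referentiel label of the form built by build_activity_libelle() further down in this module.
#
#   nsip_activity_name_id("LSST / Camera (NSIP ID: 1234)", NSIP_CLASS_PROJECT)
#   # -> ("LSST", "Camera", "1234", 0)
#   nsip_activity_name_id("Services / Helpdesk (NSIP ID: 42)", NSIP_CLASS_OTHER_ACTIVITY)
#   # -> ("Services", "Helpdesk", 0, "42")
#   nsip_activity_name_id("Malformed entry", NSIP_CLASS_PROJECT)
#   # -> (nan, nan, 0, 0), after printing an error message
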
def get_nsip_declarations(period_date: str, team: str):
    """
    Return the NSIP declaration list for the declaration period matching a given date (the
    date must be included in the period) as a dataframe

    :param period_date: date that must be inside the period
    :param team: selected team
    :return: declaration list as a dataframe
    """

    global_params = GlobalParams()

    if global_params.nsip:
        declarations = pd.json_normalize(global_params.nsip.get_declarations(period_date))
        if declarations.empty:
            return declarations

        declarations.rename(columns={"id": "id_declaration"}, inplace=True)
        # Set NaN to 0 in references as np.NaN is a float and prevents casting to int. As it will
        # be used in a merge, it is better to have a 0 than a NaN.
        if "project.id" in declarations.columns:
            declarations.loc[declarations["project.id"].isna(), "project.id"] = 0
            declarations["project.id"] = declarations["project.id"].astype(int)
        else:
            declarations["project.id"] = 0
        if "reference.id" in declarations.columns:
            declarations.loc[declarations["reference.id"].isna(), "reference.id"] = 0
            declarations["reference.id"] = declarations["reference.id"].astype(int)
        else:
            declarations["reference.id"] = 0
        declarations["nsip_fullname"] = (
            declarations["agent.lastname"] + " " + declarations["agent.firstname"]
        )

        if team != TEAM_LIST_ALL_AGENTS:
            team_agents = get_agents(period_date, team)
            agent_emails = team_agents["email_auth"]
            declarations = declarations.merge(
                agent_emails,
                how="inner",
                left_on="agent.email",
                right_on="email_auth",
                suffixes=[None, "_agent"],
            )

        return declarations

    else:
        return None

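# Illustrative sketch (standalone, hypothetical IDs): why the NaN values are set to 0 above.
# A column holding np.nan is a float column and cannot be cast to int directly, and NaN keys
# would not match anything in a later merge, so 0 is used as a neutral "no ID" value.
#
#   import numpy as np
#   import pandas as pd
#
#   ids = pd.Series([12.0, np.nan, 7.0])
#   ids.loc[ids.isna()] = 0
#   ids = ids.astype(int)   # -> [12, 0, 7]; astype(int) would raise if NaN were still present
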
def get_nsip_activities(project_activity: bool):
    """
    Retrieve laboratory activities defined in NSIP and return them in a dataframe.
    Activities can be either projects or references (other activities).

    :param project_activity: true for projects, false for other activities
    :return: dataframe or None if NSIP is not configured
    """

    global_params = GlobalParams()

    if global_params.nsip:
        activities = pd.json_normalize(
            global_params.nsip.get_activities(project_activity), record_prefix=True
        )
        if not activities.empty:
            if project_activity:
                activities["ositah_name"] = activities.apply(
                    lambda p: nsip2ositah_project_name(p["master_project.name"], p["name"]),
                    axis=1,
                )
            else:
                activities["master_project.name"] = activities.apply(
                    lambda p: reference_masterproject(p["type"]),
                    axis=1,
                )
                activities.drop(
                    activities[activities["master_project.name"].isna()].index,
                    inplace=True,
                )
                activities["ositah_name"] = activities["name"]

        return activities

    else:
        return None

def build_activity_libelle(
    nsip_id: str,
    master_project: str,
    project: str,
):
    """
    Build the Hito project name and referentiel entry name from the NSIP master project, project
    name and project ID.

    :param nsip_id: NSIP ID for the project
    :param master_project: master project name
    :param project: project name
    :return: Hito project name, Hito referentiel name
    """

    new_project_name = f"{master_project} / {project}"
    new_referentiel_name = f"{new_project_name} (NSIP ID: {nsip_id})"
    return new_project_name, new_referentiel_name

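# Illustrative sketch (hypothetical values): build_activity_libelle() produces the pair of labels
# whose referentiel form is later parsed back by nsip_activity_name_id() above.
#
#   build_activity_libelle("1234", "LSST", "Camera")
#   # -> ("LSST / Camera", "LSST / Camera (NSIP ID: 1234)")
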
def update_activity_name(
    hito_project_id: str,
    hito_referentiel_id: str,
    nsip_id: str,
    master_project: str,
    project: str,
):
    """
    Update a project name in Hito, both in the referentiel and in the project/activity table.

    :param hito_project_id: Hito project ID
    :param hito_referentiel_id: Hito referentiel ID for the project
    :param nsip_id: NSIP ID for the project
    :param master_project: master project name
    :param project: project name
    :return: 0 if the update succeeded, non-zero if an error occurred, error_msg if
             an error occurred
    """

    from ositah.utils.hito_db_model import Projet, Referentiel

    db = get_db()

    status = 0  # Assume success
    error_msg = ""
    new_project_name, new_referentiel_name = build_activity_libelle(
        nsip_id,
        master_project,
        project,
    )

    try:
        referentiel_entry = Referentiel.query.filter(Referentiel.id == hito_referentiel_id).first()
        project_entry = Projet.query.filter(Projet.id == hito_project_id).first()
        referentiel_entry.libelle = new_referentiel_name
        project_entry.libelle = new_project_name
        change_log_msg = f"Modifié le {datetime.now()}"
        if project_entry.description:
            project_entry.description += f"; {change_log_msg}"
        else:
            project_entry.description = change_log_msg
        db.session.commit()
    except Exception as e:
        status = 1
        error_msg = getattr(e, "message", repr(e))
        db.session.rollback()

    return status, error_msg

def add_activity(
    nsip_id: str,
    master_project: str,
    project: str,
    activity_teams: List[str],
    project_activity: bool,
):
    """
    Add a new project in the Hito referentiel and in the Hito project/activity table.

    :param nsip_id: NSIP ID for the project
    :param master_project: master project name
    :param project: project name
    :param activity_teams: list of team IDs associated with the project
    :param project_activity: if True it is a NSIP project, else a NSIP activity
    :return: 0 if the update succeeded, non-zero if an error occurred, error_msg if
             an error occurred
    """

    from ositah.utils.hito_db_model import Projet, Referentiel, Team

    db = get_db()

    status = 0  # Assume success
    error_msg = ""
    project_name, referentiel_name = build_activity_libelle(
        nsip_id,
        master_project,
        project,
    )

    if project_activity:
        entry_class = "projetnsipreferentiel"
        entry_order = NSIP_PROJECT_ORDER
    else:
        entry_class = "activitensipreferentiel"
        entry_order = NSIP_ACIVITY_ORDER

    try:
        referentiel_entry = Referentiel(
            libelle=referentiel_name,
            object_class=entry_class,
            ordre=entry_order,
        )
        activity_entry = Projet(
            libelle=project_name,
            description=f"Créé le {datetime.now()}",
            ordre=entry_order,
        )
        if activity_teams:
            activity_entry.teams = Team.query.filter(Team.id.in_(activity_teams)).all()
        db.session.add(referentiel_entry)
        db.session.add(activity_entry)
        db.session.commit()
        # Define the relationship between the activity and the referentiel entry after creating
        # them so that the referentiel ID generated by the DB server can be accessed
        if project_activity:
            activity_entry.projet_nsip_referentiel_id = referentiel_entry.id
        else:
            activity_entry.activite_nsip_referentiel_id = referentiel_entry.id
        db.session.commit()

    except Exception as e:
        status = 1
        error_msg = getattr(e, "message", repr(e))
        db.session.rollback()

    return status, error_msg

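# Illustrative sketch (hypothetical IDs and names): typical use of add_activity() when syncing a
# new NSIP project into Hito. The status/error_msg pair follows the convention used throughout
# this module.
#
#   status, error_msg = add_activity(
#       nsip_id="1234",
#       master_project="LSST",
#       project="Camera",
#       activity_teams=["team-uuid-1"],   # hypothetical Hito team IDs
#       project_activity=True,
#   )
#   if status != 0:
#       print(f"NSIP project creation failed: {error_msg}")
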
def remove_activity(
    hito_project_id: str,
    hito_referentiel_id: str,
    nsip_id: str,
    project_activity: bool,
):
    """
    Remove the association between a Hito activity (project or reference) and NSIP. The Hito
    activity is kept as it may be referenced by other objects, but its description is updated
    to mention that it is no longer in NSIP. The project name is updated so that it appears in the
    pseudo-masterproject MASTERPROJECT_DELETED_ACTIVITY. Associated teams are removed.

    :param hito_project_id: Hito project ID
    :param hito_referentiel_id: Hito referentiel ID for the project
    :param nsip_id: NSIP ID for the project
    :param project_activity: if True it is a NSIP project, else a NSIP activity
    :return: 0 if the update succeeded, non-zero if an error occurred, error_msg if
             an error occurred
    """

    from ositah.utils.hito_db_model import Projet, Referentiel

    db = get_db()

    status = 0  # Assume success
    error_msg = ""

    try:
        referentiel_entry = Referentiel.query.filter(Referentiel.id == hito_referentiel_id).first()
        db.session.query()
        activity_entry = Projet.query.filter(Projet.id == hito_project_id).first()
        if project_activity:
            activity_entry.projet_nsip_referentiel_id = None
        else:
            activity_entry.activite_nsip_referentiel_id = None
        change_log_msg = f"Desactivé le {datetime.now()} (NSIP ID={nsip_id})"
        if activity_entry.description:
            activity_entry.description += f"; {change_log_msg}"
        else:
            activity_entry.description = change_log_msg
        activity_entry.libelle = f"{MASTERPROJECT_DELETED_ACTIVITY} / {activity_entry.libelle}"
        activity_entry.ordre = DISABLED_ACTIVITY_ORDER
        if len(activity_entry.teams) > 0:
            activity_entry.teams.clear()
        db.session.delete(referentiel_entry)
        db.session.commit()
    except Exception as e:
        status = 1
        error_msg = getattr(e, "message", repr(e))
        db.session.rollback()

    return status, error_msg

def add_activity_teams(
    masterproject: str, project: str, team_list: List[str], project_activity: bool
):
    """
    Add teams to an activity.

    :param masterproject: activity masterproject name
    :param project: activity project name
    :param team_list: list of team names to add
    :param project_activity: if true, an Hito project else an Hito activity
    :return: status (0 if success), error_msg (empty if success)
    """

    from ositah.utils.hito_db_model import Activite, Projet, Team

    db = get_db()

    status = 0  # Assume success
    error_msg = ""

    if project_activity:
        Activity = Projet
    else:
        Activity = Activite

    activity_name = ositah2hito_project_name(masterproject, project)
    activity = Activity.query.filter(Activity.libelle == activity_name).first()

    if activity:
        try:
            for team in team_list:
                team_object = Team.query.filter(Team.nom == team).first()
                activity.teams.append(team_object)
            db.session.commit()
        except Exception as e:
            status = 1
            error_msg = getattr(e, "message", repr(e))
            db.session.rollback()

    return status, error_msg

def remove_activity_teams(
    masterproject: str, project: str, team_list: List[str], project_activity: bool
):
    """
    Remove teams from an activity. If a team is not present in the activity teams list, silently
    ignore it.

    :param masterproject: activity masterproject name
    :param project: activity project name
    :param team_list: list of team names to remove
    :param project_activity: if true, an Hito project else an Hito activity
    :return: status (0 if success), error_msg (empty if success)
    """

    from ositah.utils.hito_db_model import Activite, Projet, Team

    db = get_db()

    status = 0  # Assume success
    error_msg = ""

    if project_activity:
        Activity = Projet
    else:
        Activity = Activite

    activity_name = ositah2hito_project_name(masterproject, project)
    activity = Activity.query.filter(Activity.libelle == activity_name).first()

    if activity:
        try:
            for team in team_list:
                team_object = Team.query.filter(Team.nom == team).first()
                if team_object in activity.teams:
                    activity.teams.remove(team_object)
            db.session.commit()
        except Exception as e:
            status = 1
            error_msg = getattr(e, "message", repr(e))
            db.session.rollback()

    return status, error_msg

def reenable_activity(activity_name: str, project_activity: bool, name_prefix: str = None):
    """
    Reenable a disabled activity. This involves:
    - Updating the master project to match the original one
    - If it was an NSIP project, recreating the referentiel entry

    :param activity_name: activity name
    :param project_activity: if true, an Hito project else an Hito activity
    :param name_prefix: activity name prefix for deleted or local activities
    :return: status and error message if any
    """

    from ositah.utils.hito_db_model import Activite, Projet, Referentiel

    db = get_db()

    status = 0  # Assume success
    error_msg = ""

    if project_activity:
        Activity = Projet
    else:
        Activity = Activite

    # Retrieve the activity attributes and the NSIP ID if present in the description
    if name_prefix:
        activity_full_name = f"{name_prefix} / {activity_name}"
    else:
        activity_full_name = activity_name
    activity_entry = Activity.query.filter(Activity.libelle == activity_full_name).first()

    m = re.search(r"\(NSIP ID\=(?P<id>\d+)\)$", activity_entry.description)
    if m:
        nsip_id = m.group("id")
    else:
        nsip_id = None

    # Check if an entry exists in the referentiel for the NSIP ID: if not, create it
    nsip_entry = Referentiel.query.filter(
        Referentiel.libelle.ilike(f"%(NSIP ID = {nsip_id})")
    ).first()
    if not nsip_entry:
        master_project, activity = hito2ositah_project_name(activity_name)
        project_name, referentiel_name = build_activity_libelle(
            nsip_id,
            master_project,
            activity,
        )

        if project_activity:
            entry_class = "projetnsipreferentiel"
            entry_order = NSIP_PROJECT_ORDER
        else:
            entry_class = "activitensipreferentiel"
            entry_order = NSIP_ACIVITY_ORDER

        referentiel_entry = Referentiel(
            libelle=referentiel_name,
            object_class=entry_class,
            ordre=entry_order,
        )
    else:
        referentiel_entry = None

    # Create the referentiel entry if necessary and update the activity
    try:
        if referentiel_entry:
            db.session.add(referentiel_entry)
        activity_entry.libelle = activity_name
        activity_entry.description = f"Modifié le {datetime.now()}"
        db.session.commit()
        # Define the relationship between the activity and the referentiel entry after creating
        # them so that the referentiel ID generated by the DB server can be accessed
        if project_activity:
            activity_entry.projet_nsip_referentiel_id = referentiel_entry.id
        else:
            activity_entry.activite_nsip_referentiel_id = referentiel_entry.id
        db.session.commit()

    except Exception as e:
        status = 1
        error_msg = getattr(e, "message", repr(e))
        db.session.rollback()

    # Clear cached data to force a refresh of the project list
    clear_cached_data()

    return status, error_msg