edx-enterprise-data 8.6.0__py3-none-any.whl → 8.7.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: edx-enterprise-data
3
- Version: 8.6.0
3
+ Version: 8.7.0
4
4
  Summary: Enterprise Reporting
5
5
  Home-page: https://github.com/openedx/edx-enterprise-data
6
6
  Author: edX
@@ -1,4 +1,4 @@
1
- enterprise_data/__init__.py,sha256=EPPAk93ePtVZFtG1D6BcUQ1SPs07JiPQCLJhZ41gW-Y,123
1
+ enterprise_data/__init__.py,sha256=uaNc7nZk0jrnfiaQzk9MA9gKcNIQ30m99eUJ0R4k1X8,123
2
2
  enterprise_data/apps.py,sha256=aF6hZwDfI2oWj95tUTm_2ikHueQj-jLj-u0GrgzpsQI,414
3
3
  enterprise_data/clients.py,sha256=GvQupy5TVYfO_IKC3yzXSAgNP54r-PtIjidM5ws9Iks,3947
4
4
  enterprise_data/constants.py,sha256=uCKjfpdlMYFZJsAj3n9RMw4Cmg5_6s3NuwocO-fch3s,238
@@ -10,10 +10,11 @@ enterprise_data/signals.py,sha256=8eqNPnlvmfsKf19lGWv5xTIuBgQIqR8EZSp9UYzC8Rc,10
10
10
  enterprise_data/urls.py,sha256=bqtKF5OEWEwrNmHG3os-pZNuNsmjlhxEqp7yM4TbPf4,243
11
11
  enterprise_data/utils.py,sha256=kNO4nW_GBpBiIBlVUkCb4Xo0k1oVshT8nDOBP5eWoV8,2643
12
12
  enterprise_data/admin_analytics/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
13
+ enterprise_data/admin_analytics/completions_utils.py,sha256=kGmLy7x6aD0coNYgzLa5XzJypLkGTT5clDHLSH_QFDE,9442
13
14
  enterprise_data/admin_analytics/constants.py,sha256=6Gc9rP-J3nGgAk3fhzNlyR1HMq8Apjxs9ViWJiYrri4,722
14
15
  enterprise_data/admin_analytics/data_loaders.py,sha256=x1XNYdtJV1G9cv0SeBZqYitRV8-GlJXtEZ2cc2OJU7M,5415
15
16
  enterprise_data/admin_analytics/database.py,sha256=mNS_9xE5h6O7oMMzr6kr6LDTTSNvKzo8vaM-YG8tOd8,1312
16
- enterprise_data/admin_analytics/utils.py,sha256=65HjDdNJkQTHqS6KatHE4UpEzKDMj2_v21O-YBf130Q,10023
17
+ enterprise_data/admin_analytics/utils.py,sha256=w1GpdJCvt-ocwwLEaag4YVO5XH31NCvjwAGFYAiwUZE,10213
17
18
  enterprise_data/api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
18
19
  enterprise_data/api/urls.py,sha256=POqc_KATHdnpMf9zHtpO46pKD5KAlAExtx7G6iylLcU,273
19
20
  enterprise_data/api/v0/__init__.py,sha256=1aAzAYU5hk-RW6cKUxa1645cbZMxn7GIZ7OMjWc9MKI,46
@@ -22,12 +23,13 @@ enterprise_data/api/v0/urls.py,sha256=vzJjqIo_S3AXWs9Us8XTaJc3FnxLbYzAkmLyuDQqum
22
23
  enterprise_data/api/v0/views.py,sha256=4RslZ4NZOU-844bnebEQ71ji2utRY7jEijqC45oQQD0,14380
23
24
  enterprise_data/api/v1/__init__.py,sha256=1aAzAYU5hk-RW6cKUxa1645cbZMxn7GIZ7OMjWc9MKI,46
24
25
  enterprise_data/api/v1/paginators.py,sha256=OHbuBP7hAFJ_ce0UAMfJ1pARMMzqvzVYiYeFMw3xZLU,3592
25
- enterprise_data/api/v1/serializers.py,sha256=oNLxBbHa6CJ7d7mdA0hpmLFjKMe-S6nOIguQkEu_D4Y,11723
26
- enterprise_data/api/v1/urls.py,sha256=wMw-h0NlkOgS1HYUm2FqGOLA-BHBdFZYaL-QNZn0T60,2635
26
+ enterprise_data/api/v1/serializers.py,sha256=SaMFlNRMPIqaKelFZa7nYn3su7DJFS2wJfYZ5sUl_gM,12000
27
+ enterprise_data/api/v1/urls.py,sha256=JLjkMzTiJmHtDjN5_z4hOe0s1ug8Ec3A4Ll1E5pwVmY,3206
27
28
  enterprise_data/api/v1/views/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
28
29
  enterprise_data/api/v1/views/analytics_enrollments.py,sha256=SvuK53i_4S3etqktoJ0h5ky4dntD176u6DXZuqHTsEg,16352
29
30
  enterprise_data/api/v1/views/base.py,sha256=FTAxlz5EzvAY657wzVgzhJPFSCHHzct7IDcvm71Smt8,866
30
- enterprise_data/api/v1/views/enterprise_admin.py,sha256=RTSRyPfHvbzV_ihSbGjYi0VuE6AjaYZIyqpAKTMYa5Q,8980
31
+ enterprise_data/api/v1/views/enterprise_admin.py,sha256=7f1RHlXxmH8oLr0WLxdGPNsxdhjubwyqNIefb7PMH68,9149
32
+ enterprise_data/api/v1/views/enterprise_completions.py,sha256=LGwwCiJSBVhsm52ROIDAJQkLAaj4Wbxh-A-1INfZbOE,7433
31
33
  enterprise_data/api/v1/views/enterprise_learner.py,sha256=yABjJje3CT8I8YOhWr1_tTkdKtnGJom8eu3EFz_-0BU,18517
32
34
  enterprise_data/api/v1/views/enterprise_offers.py,sha256=VifxgqTLFLVw4extYPlHcN1N_yjXcsYsAlYEnAbpb10,1266
33
35
  enterprise_data/fixtures/enterprise_enrollment.json,sha256=6onPXXR29pMdTdbl_mn81sDi3Re5jkLUZz2TPMB_1IY,5786
@@ -101,9 +103,10 @@ enterprise_data/tests/test_models.py,sha256=MWBY-LY5TPBjZ4GlvpM-h4W-BvRKr2Rml8Bz
101
103
  enterprise_data/tests/test_utils.py,sha256=vbmYM7DMN-lHS2p4yaa0Yd6uSGXd2qoZRDE9X3J4Sec,18385
102
104
  enterprise_data/tests/test_views.py,sha256=UvDRNTxruy5zBK_KgUy2cBMbwlaTW_vkM0-TCXbQZiY,69667
103
105
  enterprise_data/tests/admin_analytics/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
104
- enterprise_data/tests/admin_analytics/mock_enrollments.py,sha256=ApRzq6LxdAqGNNZqGoPSdLbGCf6R-z8qL8FoRf4wvvs,6712
106
+ enterprise_data/tests/admin_analytics/mock_enrollments.py,sha256=kINSGZx7M3CcsVZ04AZAy4AqNGVbRD47OfI2_3dEVSs,7262
105
107
  enterprise_data/tests/admin_analytics/test_analytics_enrollments.py,sha256=WNdJCM52zUJikBTl3VakPIvNiVNvUed-8vk275njSdY,14847
106
108
  enterprise_data/tests/admin_analytics/test_data_loaders.py,sha256=o3denJ4aUS1pI5Crksl4C6m-NtCBm8ynoHBnLkf-v2U,4641
109
+ enterprise_data/tests/admin_analytics/test_enterprise_completions.py,sha256=BXptZVfnTReJfMSog-ZMDnoIe6Gh_sX2GDRzyugxSMI,7356
107
110
  enterprise_data/tests/admin_analytics/test_utils.py,sha256=y33HXy6BDOoftdcz3qYlOYhgx7JSXDki-OLzBdTpiwA,11449
108
111
  enterprise_data/tests/api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
109
112
  enterprise_data/tests/api/v0/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -112,7 +115,7 @@ enterprise_data/tests/api/v1/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NM
112
115
  enterprise_data/tests/api/v1/test_serializers.py,sha256=DwgEHcyOP3oqNUPB2O-NkJGeO_cYs9XJiq7791vJLZE,3682
113
116
  enterprise_data/tests/api/v1/test_views.py,sha256=rLqUHfar0HdBNtz33hQxd_0qUUgr7Ku3KwQSQ1B4Ypg,15213
114
117
  enterprise_data/tests/api/v1/views/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
115
- enterprise_data/tests/api/v1/views/test_enterprise_admin.py,sha256=7FCPpfFrv-pisJ9cxt3B-KIf-KM3a7XzQ8MweLp23wI,4783
118
+ enterprise_data/tests/api/v1/views/test_enterprise_admin.py,sha256=At4g5Q4W4SNPk693HxuoEOHtMehvkscLvtXV4dbivvE,4969
116
119
  enterprise_data_roles/__init__.py,sha256=toCpbypm2uDoWVw29_em9gPFNly8vNUS__C0b4TCqEg,112
117
120
  enterprise_data_roles/admin.py,sha256=QNP0VeWE092vZzpyxOA5UJK1nNGl5e71B1J0RCwo_nU,998
118
121
  enterprise_data_roles/apps.py,sha256=nKi8TyuQ5Q6WGtKs5QeXvUTc3N-YQjKhyBnm2EM3Bng,260
@@ -153,8 +156,8 @@ enterprise_reporting/tests/test_send_enterprise_reports.py,sha256=WtL-RqGgu2x5PP
153
156
  enterprise_reporting/tests/test_utils.py,sha256=Zt_TA0LVb-B6fQGkUkAKKVlUKKnQh8jnw1US1jKe7g8,9493
154
157
  enterprise_reporting/tests/test_vertica_client.py,sha256=-R2yNCGUjRtoXwLMBloVFQkFYrJoo613VCr61gwI3kQ,140
155
158
  enterprise_reporting/tests/utils.py,sha256=xms2LM7DV3wczXEfctOK1ddel1EE0J_YSr17UzbCDy4,1401
156
- edx_enterprise_data-8.6.0.dist-info/LICENSE,sha256=dql8h4yceoMhuzlcK0TT_i-NgTFNIZsgE47Q4t3dUYI,34520
157
- edx_enterprise_data-8.6.0.dist-info/METADATA,sha256=UT5OiAzT8dXg3nZ9psbT8eqAbGjEnDD1GjRb8YEitv8,1569
158
- edx_enterprise_data-8.6.0.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
159
- edx_enterprise_data-8.6.0.dist-info/top_level.txt,sha256=f5F2kU-dob6MqiHJpgZkFzoCD5VMhsdpkTV5n9Tvq3I,59
160
- edx_enterprise_data-8.6.0.dist-info/RECORD,,
159
+ edx_enterprise_data-8.7.0.dist-info/LICENSE,sha256=dql8h4yceoMhuzlcK0TT_i-NgTFNIZsgE47Q4t3dUYI,34520
160
+ edx_enterprise_data-8.7.0.dist-info/METADATA,sha256=6ZncVDwM9a49UHdde9Rk1fhjP1BEaradbGjKCHSX_WE,1569
161
+ edx_enterprise_data-8.7.0.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
162
+ edx_enterprise_data-8.7.0.dist-info/top_level.txt,sha256=f5F2kU-dob6MqiHJpgZkFzoCD5VMhsdpkTV5n9Tvq3I,59
163
+ edx_enterprise_data-8.7.0.dist-info/RECORD,,
@@ -2,4 +2,4 @@
2
2
  Enterprise data api application. This Django app exposes API endpoints used by enterprises.
3
3
  """
4
4
 
5
- __version__ = "8.6.0"
5
+ __version__ = "8.7.0"
@@ -0,0 +1,261 @@
1
+ """This module contains utility functions for completions analytics."""
2
+ from enterprise_data.utils import date_filter
3
+
4
+
5
+ def date_aggregation(level, group, date, df, type_="count"):
6
+ """Perform date aggregation on a DataFrame.
7
+
8
+ This function aggregates data based on the specified level of aggregation (e.g., daily, weekly, monthly, quarterly)
9
+ and returns the aggregated data.
10
+
11
+ Args:
12
+ level (str): The level of aggregation. Possible values are "Daily", "Weekly", "Monthly", and "Quarterly".
13
+ group (list): A list of column names to group the data by.
14
+ date (str): The name of the date column in the DataFrame.
15
+ df (pandas.DataFrame): The DataFrame containing the data to be aggregated.
16
+ type_ (str, optional): The type of aggregation to perform. Possible values
17
+ are "count" and "sum". Defaults to "count".
18
+
19
+ Returns:
20
+ pandas.DataFrame: The aggregated data.
21
+
22
+ """
23
+ if type_ == "count":
24
+ if level == "Daily":
25
+ df = df.groupby(group).size().reset_index()
26
+ group.append("count")
27
+ df.columns = group
28
+ elif level == "Weekly":
29
+ df[date] = df[date].dt.to_period("W").dt.start_time
30
+ df = df.groupby(group).size().reset_index()
31
+ group.append("count")
32
+ df.columns = group
33
+ elif level == "Monthly":
34
+ df[date] = df[date].dt.to_period("M").dt.start_time
35
+ df = df.groupby(group).size().reset_index()
36
+ group.append("count")
37
+ df.columns = group
38
+ elif level == "Quarterly":
39
+ df[date] = df[date].dt.to_period("Q").dt.start_time
40
+ df = df.groupby(group).size().reset_index()
41
+ group.append("count")
42
+ df.columns = group
43
+ elif type_ == "sum":
44
+ if level == "Daily":
45
+ df = df.groupby(group).sum().reset_index()
46
+ group.append("sum")
47
+ df.columns = group
48
+ elif level == "Weekly":
49
+ df[date] = df[date].dt.to_period("W").dt.start_time
50
+ df = df.groupby(group).sum().reset_index()
51
+ group.append("sum")
52
+ df.columns = group
53
+ elif level == "Monthly":
54
+ df[date] = df[date].dt.to_period("M").dt.start_time
55
+ df = df.groupby(group).sum().reset_index()
56
+ group.append("sum")
57
+ df.columns = group
58
+ elif level == "Quarterly":
59
+ df[date] = df[date].dt.to_period("Q").dt.start_time
60
+ df = df.groupby(group).sum().reset_index()
61
+ group.append("sum")
62
+ df.columns = group
63
+
64
+ return df
65
+
66
+
67
+ def calculation(calc, df, type_="count"):
68
+ """Perform a calculation on the given DataFrame based on the specified calculation type.
69
+
70
+ Args:
71
+ calc (str): The calculation type. Possible values are "Total", "Running Total",
72
+ "Moving Average (3 Period)", and "Moving Average (7 Period)".
73
+ df (pandas.DataFrame): The filtered enrollments data.
74
+ type_ (str, optional): The type of calculation to perform. Default is "count".
75
+
76
+ Returns:
77
+ pandas.DataFrame: The aggregated data after performing the calculation.
78
+ """
79
+ if type_ == "count":
80
+ if calc == "Total":
81
+ pass
82
+ elif calc == "Running Total":
83
+ df["count"] = df.groupby("enroll_type")["count"].cumsum()
84
+ elif calc == "Moving Average (3 Period)":
85
+ df["count"] = (
86
+ df.groupby("enroll_type")["count"]
87
+ .rolling(3)
88
+ .mean()
89
+ .droplevel(level=[0])
90
+ )
91
+ elif calc == "Moving Average (7 Period)":
92
+ df["count"] = (
93
+ df.groupby("enroll_type")["count"]
94
+ .rolling(7)
95
+ .mean()
96
+ .droplevel(level=[0])
97
+ )
98
+ elif type_ == "sum":
99
+ if calc == "Total":
100
+ pass
101
+ elif calc == "Running Total":
102
+ df["sum"] = df.groupby("enroll_type")["sum"].cumsum()
103
+ elif calc == "Moving Average (3 Period)":
104
+ df["sum"] = (
105
+ df.groupby("enroll_type")["sum"].rolling(3).mean().droplevel(level=[0])
106
+ )
107
+ elif calc == "Moving Average (7 Period)":
108
+ df["sum"] = (
109
+ df.groupby("enroll_type")["sum"].rolling(7).mean().droplevel(level=[0])
110
+ )
111
+
112
+ return df
113
+
114
+
115
+ def get_completions_over_time(start_date, end_date, dff, date_agg, calc):
116
+ """Get agreggated data for completions over time graph.
117
+
118
+ Args:
119
+ start_date (datetime): The start date for the date filter.
120
+ end_date (datetime): The end date for the date filter.
121
+ dff (pandas.DataFrame): enrollments data
122
+ date_agg (str): Granularity of the date aggregation; one of Daily, Weekly, Monthly, Quarterly.
123
+ calc (str): The calculation to apply; one of Total, Running Total, Moving
124
+ Average (3 Period), Moving Average (7 Period)
125
+ """
126
+
127
+ dff = dff[dff["has_passed"] == 1]
128
+
129
+ # Date filtering.
130
+ dff = date_filter(start=start_date, end=end_date, data_frame=dff, date_column="passed_date")
131
+
132
+ # Date aggregation.
133
+ dff = date_aggregation(
134
+ level=date_agg, group=["passed_date", "enroll_type"], date="passed_date", df=dff
135
+ )
136
+
137
+ # Calculating metric.
138
+ dff = calculation(calc=calc, df=dff)
139
+
140
+ return dff
141
+
142
+
143
+ def get_top_courses_by_completions(start_date, end_date, dff):
144
+ """Get top 10 courses by completions.
145
+
146
+ Args:
147
+ start_date (datetime): The start date for the date filter.
148
+ end_date (datetime): The end date for the date filter.
149
+ dff (pandas.DataFrame): Enrollments data
150
+ """
151
+
152
+ dff = dff[dff["has_passed"] == 1]
153
+
154
+ # Date filtering.
155
+ dff = date_filter(start=start_date, end=end_date, data_frame=dff, date_column="passed_date")
156
+
157
+ courses = list(
158
+ dff.groupby(["course_key"]).size().sort_values(ascending=False)[:10].index
159
+ )
160
+
161
+ dff = (
162
+ dff[dff.course_key.isin(courses)]
163
+ .groupby(["course_key", "course_title", "enroll_type"])
164
+ .size()
165
+ .reset_index()
166
+ )
167
+ dff.columns = ["course_key", "course_title", "enroll_type", "count"]
168
+
169
+ return dff
170
+
171
+
172
+ def get_top_subjects_by_completions(start_date, end_date, dff):
173
+ """Get top 10 subjects by completions.
174
+
175
+ Args:
176
+ start_date (datetime): The start date for the date filter.
177
+ end_date (datetime): The end date for the date filter.
178
+ dff (pandas.DataFrame): Enrollments data
179
+ """
180
+
181
+ dff = dff[dff["has_passed"] == 1]
182
+
183
+ # Date filtering.
184
+ dff = date_filter(start=start_date, end=end_date, data_frame=dff, date_column="passed_date")
185
+
186
+ subjects = list(
187
+ dff.groupby(["course_subject"]).size().sort_values(ascending=False)[:10].index
188
+ )
189
+
190
+ dff = (
191
+ dff[dff.course_subject.isin(subjects)]
192
+ .groupby(["course_subject", "enroll_type"])
193
+ .size()
194
+ .reset_index()
195
+ )
196
+ dff.columns = ["course_subject", "enroll_type", "count"]
197
+
198
+ return dff
199
+
200
+
201
+ def get_csv_data_for_completions_over_time(
202
+ start_date, end_date, enrollments, date_agg, calc
203
+ ):
204
+ """Get csv data for completions over time graph.
205
+
206
+ Args:
207
+ start_date (datetime): The start date for the date filter.
208
+ end_date (datetime): The end date for the date filter.
209
+ enrollments (pandas.DataFrame): Filtered enrollments data
210
+ date_agg (str): Granularity of the date aggregation; one of Daily, Weekly, Monthly, Quarterly.
211
+ calc (str): The calculation to apply; one of Total, Running Total, Moving
212
+ Average (3 Period), Moving Average (7 Period)
213
+
214
+ Returns:
215
+ dict: csv data
216
+ """
217
+
218
+ dff = get_completions_over_time(start_date, end_date, enrollments, date_agg, calc)
219
+ dff = dff.pivot(index="passed_date", columns="enroll_type", values="count")
220
+ filename = (
221
+ f"Completions Timeseries, {start_date} - {end_date} ({date_agg} {calc}).csv"
222
+ )
223
+ return {"filename": filename, "data": dff}
224
+
225
+
226
+ def get_csv_data_for_top_courses_by_completions(start_date, end_date, enrollments):
227
+ """Get csv data for top 10 courses by completions.
228
+
229
+ Args:
230
+ start_date (datetime): The start date for the date filter.
231
+ end_date (datetime): The end date for the date filter.
232
+ enrollments (pandas.DataFrame): Filtered enrollments data
233
+
234
+ Returns:
235
+ dict: csv data
236
+ """
237
+
238
+ dff = get_top_courses_by_completions(start_date, end_date, enrollments)
239
+ dff = dff.pivot(
240
+ index=["course_key", "course_title"], columns="enroll_type", values="count"
241
+ )
242
+ filename = f"Top 10 Courses by Completions, {start_date} - {end_date}.csv"
243
+ return {"filename": filename, "data": dff}
244
+
245
+
246
+ def get_csv_data_for_top_subjects_by_completions(start_date, end_date, enrollments):
247
+ """Get csv data for top 10 subjects by completions.
248
+
249
+ Args:
250
+ start_date (datetime): The start date for the date filter.
251
+ end_date (datetime): The end date for the date filter.
252
+ enrollments (pandas.DataFrame): Filtered enrollments data
253
+
254
+ Returns:
255
+ dict: csv data
256
+ """
257
+
258
+ dff = get_top_subjects_by_completions(start_date, end_date, enrollments)
259
+ dff = dff.pivot(index="course_subject", columns="enroll_type", values="count")
260
+ filename = f"Top 10 Subjects by Completions, {start_date} - {end_date}.csv"
261
+ return {"filename": filename, "data": dff}
@@ -18,6 +18,9 @@ class ChartType(Enum):
18
18
  BUBBLE = 'bubble'
19
19
  TOP_SKILLS_ENROLLMENT = 'top_skills_enrollment'
20
20
  TOP_SKILLS_COMPLETION = 'top_skills_completion'
21
+ COMPLETIONS_OVER_TIME = 'completions_over_time'
22
+ TOP_COURSES_BY_COMPLETIONS = 'top_courses_by_completions'
23
+ TOP_SUBJECTS_BY_COMPLETIONS = 'top_subjects_by_completions'
21
24
 
22
25
 
23
26
  def granularity_aggregation(level, group, date, data_frame, aggregation_type="count"):
@@ -172,7 +175,7 @@ def get_skills_bubble_chart_df(skills_filtered):
172
175
  """ Get the skills data for the bubble chart.
173
176
 
174
177
  Args:
175
- skills_filtered (list): The skills data.
178
+ skills_filtered (pandas.DataFrame): The skills data.
176
179
 
177
180
  Returns:
178
181
  (pandas.DataFrame): The skills data for the bubble chart.
@@ -206,6 +206,11 @@ class AdminAnalyticsAggregatesQueryParamsSerializer(serializers.Serializer): #
206
206
  """
207
207
  start_date = serializers.DateField(required=False)
208
208
  end_date = serializers.DateField(required=False)
209
+ granularity = serializers.CharField(required=False)
210
+ calculation = serializers.CharField(required=False)
211
+ response_type = serializers.CharField(required=False)
212
+ page = serializers.IntegerField(required=False)
213
+ chart_type = serializers.CharField(required=False)
209
214
 
210
215
  def validate(self, attrs):
211
216
  """
@@ -8,6 +8,7 @@ from rest_framework.routers import DefaultRouter
8
8
  from django.urls import re_path
9
9
 
10
10
  from enterprise_data.api.v1.views import enterprise_admin as enterprise_admin_views
11
+ from enterprise_data.api.v1.views import enterprise_completions as enterprise_completions_views
11
12
  from enterprise_data.api.v1.views import enterprise_learner as enterprise_learner_views
12
13
  from enterprise_data.api.v1.views import enterprise_offers as enterprise_offers_views
13
14
  from enterprise_data.api.v1.views.analytics_enrollments import (
@@ -71,6 +72,16 @@ urlpatterns = [
71
72
  enterprise_admin_views.EnterpriseAdminAnalyticsSkillsView.as_view(),
72
73
  name='enterprise-admin-analytics-skills'
73
74
  ),
75
+ re_path(
76
+ fr'^admin/anlaytics/(?P<enterprise_id>{UUID4_REGEX})/completions/stats$',
77
+ enterprise_completions_views.EnterrpiseAdminCompletionsStatsView.as_view(),
78
+ name='enterprise-admin-analytics-completions-stats'
79
+ ),
80
+ re_path(
81
+ fr'^admin/anlaytics/(?P<enterprise_id>{UUID4_REGEX})/completions$',
82
+ enterprise_completions_views.EnterrpiseAdminCompletionsView.as_view(),
83
+ name='enterprise-admin-analytics-completions'
84
+ ),
74
85
  ]
75
86
 
76
87
  urlpatterns += router.urls
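
Illustrative note (not part of the package): the two routes added above (the 'anlaytics' spelling is carried over from the existing admin analytics paths) can be reversed by name, as the new tests later in this diff do. A sketch, runnable only inside the package's Django environment; the UUID is a placeholder:

from rest_framework.reverse import reverse

enterprise_id = "ee5e6b3a-069a-4947-bb8d-d2dbc323396c"  # placeholder

stats_url = reverse(
    "v1:enterprise-admin-analytics-completions-stats",
    kwargs={"enterprise_id": enterprise_id},
)
completions_url = reverse(
    "v1:enterprise-admin-analytics-completions",
    kwargs={"enterprise_id": enterprise_id},
)
# Both resolve to .../admin/anlaytics/<enterprise_id>/completions[/stats]
# under whatever prefix the v1 API is mounted at.
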
@@ -189,17 +189,21 @@ class EnterpriseAdminAnalyticsSkillsView(APIView):
189
189
  data=request.GET
190
190
  )
191
191
  serializer.is_valid(raise_exception=True)
192
-
193
- start_date = serializer.data.get("start_date")
194
- end_date = serializer.data.get("end_date", datetime.now())
195
-
196
192
  last_updated_at = fetch_max_enrollment_datetime()
197
193
  cache_expiry = (
198
194
  last_updated_at + timedelta(days=1) if last_updated_at else datetime.now()
199
195
  )
196
+
197
+ enrollment = fetch_and_cache_enrollments_data(
198
+ enterprise_id, cache_expiry
199
+ ).copy()
200
+
201
+ start_date = serializer.data.get('start_date', enrollment.enterprise_enrollment_date.min())
202
+ end_date = serializer.data.get('end_date', datetime.now())
203
+
200
204
  skills = fetch_and_cache_skills_data(enterprise_id, cache_expiry).copy()
201
205
 
202
- if request.GET.get("format") == "csv":
206
+ if serializer.data.get('response_type') == 'csv':
203
207
  csv_data = get_top_skills_csv_data(skills, start_date, end_date)
204
208
  response = HttpResponse(content_type='text/csv')
205
209
  filename = f"Skills by Enrollment and Completion, {start_date} - {end_date}.csv"
@@ -0,0 +1,177 @@
1
+ """Views for enterprise admin completions analytics."""
2
+ import datetime
3
+ from datetime import datetime, timedelta
4
+
5
+ from edx_rbac.decorators import permission_required
6
+ from edx_rest_framework_extensions.auth.jwt.authentication import JwtAuthentication
7
+ from rest_framework.response import Response
8
+ from rest_framework.status import HTTP_200_OK
9
+ from rest_framework.views import APIView
10
+
11
+ from django.http import HttpResponse
12
+
13
+ from enterprise_data.admin_analytics.completions_utils import (
14
+ get_completions_over_time,
15
+ get_csv_data_for_completions_over_time,
16
+ get_csv_data_for_top_courses_by_completions,
17
+ get_csv_data_for_top_subjects_by_completions,
18
+ get_top_courses_by_completions,
19
+ get_top_subjects_by_completions,
20
+ )
21
+ from enterprise_data.admin_analytics.constants import CALCULATION, GRANULARITY
22
+ from enterprise_data.admin_analytics.data_loaders import fetch_max_enrollment_datetime
23
+ from enterprise_data.admin_analytics.utils import ChartType, fetch_and_cache_enrollments_data
24
+ from enterprise_data.api.v1 import serializers
25
+ from enterprise_data.api.v1.paginators import AdvanceAnalyticsPagination
26
+ from enterprise_data.utils import date_filter
27
+
28
+
29
+ class EnterrpiseAdminCompletionsStatsView(APIView):
30
+ """
31
+ API for getting the enterprise admin completions.
32
+ """
33
+ authentication_classes = (JwtAuthentication,)
34
+ http_method_names = ['get']
35
+
36
+ @permission_required(
37
+ "can_access_enterprise", fn=lambda request, enterprise_id: enterprise_id
38
+ )
39
+ def get(self, request, enterprise_id):
40
+ """
41
+ HTTP GET endpoint to retrieve the enterprise admin completions
42
+ """
43
+ serializer = serializers.AdminAnalyticsAggregatesQueryParamsSerializer(
44
+ data=request.GET
45
+ )
46
+ serializer.is_valid(raise_exception=True)
47
+
48
+ last_updated_at = fetch_max_enrollment_datetime()
49
+ cache_expiry = (
50
+ last_updated_at + timedelta(days=1) if last_updated_at else datetime.now()
51
+ )
52
+
53
+ enrollments = fetch_and_cache_enrollments_data(
54
+ enterprise_id, cache_expiry
55
+ ).copy()
56
+ # Use start and end date if provided by the client, if client has not provided then use
57
+ # 1. minimum enrollment date from the data as the start_date
58
+ # 2. today's date as the end_date
59
+ start_date = serializer.data.get(
60
+ "start_date", enrollments.enterprise_enrollment_date.min()
61
+ )
62
+ end_date = serializer.data.get("end_date", datetime.now())
63
+
64
+ if serializer.data.get('response_type') == 'csv':
65
+ chart_type = serializer.data.get('chart_type')
66
+ response = HttpResponse(content_type='text/csv')
67
+ csv_data = {}
68
+
69
+ if chart_type == ChartType.COMPLETIONS_OVER_TIME.value:
70
+ csv_data = get_csv_data_for_completions_over_time(
71
+ start_date=start_date,
72
+ end_date=end_date,
73
+ enrollments=enrollments.copy(),
74
+ date_agg=serializer.data.get('granularity', GRANULARITY.DAILY.value),
75
+ calc=serializer.data.get('calculation', CALCULATION.TOTAL.value),
76
+ )
77
+ elif chart_type == ChartType.TOP_COURSES_BY_COMPLETIONS.value:
78
+ csv_data = get_csv_data_for_top_courses_by_completions(
79
+ start_date=start_date, end_date=end_date, enrollments=enrollments.copy()
80
+ )
81
+ elif chart_type == ChartType.TOP_SUBJECTS_BY_COMPLETIONS.value:
82
+ csv_data = get_csv_data_for_top_subjects_by_completions(
83
+ start_date=start_date, end_date=end_date, enrollments=enrollments.copy()
84
+ )
85
+ filename = csv_data['filename']
86
+ response['Content-Disposition'] = f'attachment; filename="{filename}"'
87
+ csv_data['data'].to_csv(path_or_buf=response)
88
+ return response
89
+
90
+ completions_over_time = get_completions_over_time(
91
+ start_date=start_date,
92
+ end_date=end_date,
93
+ dff=enrollments.copy(),
94
+ date_agg=serializer.data.get('granularity', GRANULARITY.DAILY.value),
95
+ calc=serializer.data.get('calculation', CALCULATION.TOTAL.value),
96
+ )
97
+ top_courses_by_completions = get_top_courses_by_completions(
98
+ start_date=start_date, end_date=end_date, dff=enrollments.copy()
99
+ )
100
+ top_subjects_by_completions = get_top_subjects_by_completions(
101
+ start_date=start_date, end_date=end_date, dff=enrollments.copy()
102
+ )
103
+
104
+ return Response(
105
+ data={
106
+ 'completions_over_time': completions_over_time.to_dict(
107
+ orient="records"
108
+ ),
109
+ 'top_courses_by_completions': top_courses_by_completions.to_dict(
110
+ orient="records"
111
+ ),
112
+ 'top_subjects_by_completions': top_subjects_by_completions.to_dict(
113
+ orient="records"
114
+ ),
115
+ },
116
+ status=HTTP_200_OK,
117
+ )
118
+
119
+
120
+ class EnterrpiseAdminCompletionsView(APIView):
121
+ """
122
+ API for getting the enterprise admin completions.
123
+ """
124
+ authentication_classes = (JwtAuthentication,)
125
+ http_method_names = ['get']
126
+ pagination_class = AdvanceAnalyticsPagination
127
+
128
+ @permission_required(
129
+ "can_access_enterprise", fn=lambda request, enterprise_id: enterprise_id
130
+ )
131
+ def get(self, request, enterprise_id):
132
+ """
133
+ HTTP GET endpoint to retrieve the enterprise admin completions
134
+ """
135
+ serializer = serializers.AdminAnalyticsAggregatesQueryParamsSerializer(
136
+ data=request.GET
137
+ )
138
+ serializer.is_valid(raise_exception=True)
139
+
140
+ last_updated_at = fetch_max_enrollment_datetime()
141
+ cache_expiry = (
142
+ last_updated_at + timedelta(days=1) if last_updated_at else datetime.now()
143
+ )
144
+
145
+ enrollments = fetch_and_cache_enrollments_data(
146
+ enterprise_id, cache_expiry
147
+ ).copy()
148
+ # Use start and end date if provided by the client, if client has not provided then use
149
+ # 1. minimum enrollment date from the data as the start_date
150
+ # 2. today's date as the end_date
151
+ start_date = serializer.data.get(
152
+ 'start_date', enrollments.enterprise_enrollment_date.min()
153
+ )
154
+ end_date = serializer.data.get('end_date', datetime.now())
155
+
156
+ dff = enrollments[enrollments['has_passed'] == 1]
157
+
158
+ # Date filtering
159
+ dff = date_filter(start=start_date, end=end_date, data_frame=dff, date_column='passed_date')
160
+
161
+ dff = dff[['email', 'course_title', 'course_subject', 'passed_date']]
162
+ dff['passed_date'] = dff['passed_date'].dt.date
163
+ dff = dff.sort_values(by="passed_date", ascending=False).reset_index(drop=True)
164
+
165
+ if serializer.data.get('response_type') == 'csv':
166
+ response = HttpResponse(content_type='text/csv')
167
+ filename = f"Individual Completions, {start_date} - {end_date}.csv"
168
+ response['Content-Disposition'] = f'attachment; filename="{filename}"'
169
+ dff.to_csv(path_or_buf=response, index=False)
170
+ return response
171
+
172
+ paginator = self.pagination_class()
173
+ page = paginator.paginate_queryset(dff, request)
174
+ serialized_data = page.data.to_dict(orient='records')
175
+ response = paginator.get_paginated_response(serialized_data)
176
+
177
+ return response
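
Illustrative note (not part of the package): together, the two views above expose an aggregated stats endpoint and a paginated/CSV listing. A hedged client-side sketch; the host, URL prefix, and token are assumptions, and JwtAuthentication requires a valid JWT:

import requests

BASE = "https://example.com/enterprise/api/v1"          # assumed mount point
ENTERPRISE_ID = "ee5e6b3a-069a-4947-bb8d-d2dbc323396c"  # placeholder UUID
HEADERS = {"Authorization": "JWT <token>"}              # placeholder credential

# Aggregated completions stats as JSON.
stats = requests.get(
    f"{BASE}/admin/anlaytics/{ENTERPRISE_ID}/completions/stats",
    params={"granularity": "Weekly", "calculation": "Running Total"},
    headers=HEADERS,
    timeout=30,
).json()

# The same endpoint as CSV, selecting one chart via chart_type.
csv_bytes = requests.get(
    f"{BASE}/admin/anlaytics/{ENTERPRISE_ID}/completions/stats",
    params={"response_type": "csv", "chart_type": "top_courses_by_completions"},
    headers=HEADERS,
    timeout=30,
).content

# Paginated list of individual completions.
page_one = requests.get(
    f"{BASE}/admin/anlaytics/{ENTERPRISE_ID}/completions",
    params={"page": 1},
    headers=HEADERS,
    timeout=30,
).json()
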
@@ -3,6 +3,7 @@
3
3
  import pandas as pd
4
4
 
5
5
  from enterprise_data.admin_analytics.constants import ENROLLMENT_CSV
6
+ from enterprise_data.admin_analytics.utils import ChartType
6
7
 
7
8
  ENROLLMENTS = [
8
9
  {
@@ -71,7 +72,7 @@ ENROLLMENTS = [
71
72
  "cert_awarded": 0,
72
73
  "date_certificate_created_raw": None,
73
74
  "passed_date_raw": None,
74
- "passed_date": None,
75
+ "passed_date": '2022-08-24',
75
76
  "has_passed": 0,
76
77
  },
77
78
  {
@@ -94,7 +95,7 @@ ENROLLMENTS = [
94
95
  "cert_awarded": 0,
95
96
  "date_certificate_created_raw": None,
96
97
  "passed_date_raw": None,
97
- "passed_date": None,
98
+ "passed_date": "2022-08-24",
98
99
  "has_passed": 0,
99
100
  },
100
101
  {
@@ -117,7 +118,7 @@ ENROLLMENTS = [
117
118
  "cert_awarded": 0,
118
119
  "date_certificate_created_raw": None,
119
120
  "passed_date_raw": None,
120
- "passed_date": None,
121
+ "passed_date": "2022-08-20",
121
122
  "has_passed": 0,
122
123
  },
123
124
  ]
@@ -142,6 +143,21 @@ ENROLLMENT_STATS_CSVS = {
142
143
  b'course_subject,certificate\nbusiness-management,2\ncommunication,1\nmedicine,1\nsocial-sciences,1\n'
143
144
  )
144
145
  }
146
+ COMPLETIONS_STATS_CSVS = {
147
+ ChartType.COMPLETIONS_OVER_TIME.value: (
148
+ b'passed_date,certificate\n'
149
+ b'2021-08-25,1\n'
150
+ b'2021-09-01,2\n'
151
+ ),
152
+ ChartType.TOP_COURSES_BY_COMPLETIONS.value: (
153
+ b'course_key,course_title,certificate\n'
154
+ b'hEmW+tvk03,Re-engineered tangible approach,2\n'
155
+ ),
156
+ ChartType.TOP_SUBJECTS_BY_COMPLETIONS.value: (
157
+ b'course_subject,certificate\n'
158
+ b'business-management,2\n'
159
+ )
160
+ }
145
161
 
146
162
 
147
163
  def enrollments_dataframe():
@@ -0,0 +1,202 @@
1
+ """Unitest for EnterpriseAdminCompletionsStatsView."""
2
+ from datetime import datetime
3
+
4
+ import ddt
5
+ from mock import patch
6
+ from rest_framework import status
7
+ from rest_framework.reverse import reverse
8
+ from rest_framework.test import APITransactionTestCase
9
+
10
+ from enterprise_data.admin_analytics.utils import ChartType
11
+ from enterprise_data.tests.admin_analytics.mock_enrollments import (
12
+ COMPLETIONS_STATS_CSVS,
13
+ ENROLLMENTS,
14
+ enrollments_dataframe,
15
+ )
16
+ from enterprise_data.tests.mixins import JWTTestMixin
17
+ from enterprise_data.tests.test_utils import UserFactory
18
+ from enterprise_data_roles.constants import ENTERPRISE_DATA_ADMIN_ROLE
19
+ from enterprise_data_roles.models import EnterpriseDataFeatureRole, EnterpriseDataRoleAssignment
20
+
21
+
22
+ @ddt.ddt
23
+ class TestCompletionstStatsAPI(JWTTestMixin, APITransactionTestCase):
24
+ """Tests for EnterrpiseAdminCompletionsStatsView."""
25
+
26
+ def setUp(self):
27
+ """
28
+ Setup method.
29
+ """
30
+ super().setUp()
31
+ self.user = UserFactory(is_staff=True)
32
+ role, __ = EnterpriseDataFeatureRole.objects.get_or_create(
33
+ name=ENTERPRISE_DATA_ADMIN_ROLE
34
+ )
35
+ self.role_assignment = EnterpriseDataRoleAssignment.objects.create(
36
+ role=role, user=self.user
37
+ )
38
+ self.client.force_authenticate(user=self.user)
39
+
40
+ self.enterprise_id = "ee5e6b3a-069a-4947-bb8d-d2dbc323396c"
41
+ self.set_jwt_cookie()
42
+
43
+ self.url = reverse(
44
+ "v1:enterprise-admin-analytics-completions-stats",
45
+ kwargs={"enterprise_id": self.enterprise_id},
46
+ )
47
+
48
+ fetch_max_enrollment_datetime_patcher = patch(
49
+ 'enterprise_data.api.v1.views.enterprise_completions.fetch_max_enrollment_datetime',
50
+ return_value=datetime.now()
51
+ )
52
+
53
+ fetch_max_enrollment_datetime_patcher.start()
54
+ self.addCleanup(fetch_max_enrollment_datetime_patcher.stop)
55
+
56
+ @patch(
57
+ "enterprise_data.api.v1.views.enterprise_completions.fetch_and_cache_enrollments_data"
58
+ )
59
+ def test_get(self, mock_fetch_and_cache_enrollments_data):
60
+ """
61
+ Test that the GET method of EnterrpiseAdminCompletionsStatsView works.
62
+ """
63
+ mock_fetch_and_cache_enrollments_data.return_value = enrollments_dataframe()
64
+
65
+ params = {
66
+ "start_date": "2020-01-01",
67
+ "end_date": "2025-08-09",
68
+ "calculation": "Running Total",
69
+ "granularity": "Daily",
70
+ }
71
+ response = self.client.get(self.url, params)
72
+ assert response.status_code == status.HTTP_200_OK
73
+ data = response.json()
74
+ assert data == {
75
+ "completions_over_time": [
76
+ {
77
+ "passed_date": "2021-08-25T00:00:00",
78
+ "enroll_type": "certificate",
79
+ "count": 1,
80
+ },
81
+ {
82
+ "passed_date": "2021-09-01T00:00:00",
83
+ "enroll_type": "certificate",
84
+ "count": 2,
85
+ },
86
+ ],
87
+ "top_courses_by_completions": [
88
+ {
89
+ "course_key": "hEmW+tvk03",
90
+ "course_title": "Re-engineered tangible approach",
91
+ "enroll_type": "certificate",
92
+ "count": 2,
93
+ }
94
+ ],
95
+ "top_subjects_by_completions": [
96
+ {
97
+ "course_subject": "business-management",
98
+ "enroll_type": "certificate",
99
+ "count": 2,
100
+ }
101
+ ],
102
+ }
103
+
104
+ @patch("enterprise_data.api.v1.views.enterprise_completions.fetch_and_cache_enrollments_data")
105
+ @ddt.data(
106
+ ChartType.COMPLETIONS_OVER_TIME.value,
107
+ ChartType.TOP_COURSES_BY_COMPLETIONS.value,
108
+ ChartType.TOP_SUBJECTS_BY_COMPLETIONS.value,
109
+ )
110
+ def test_get_csv(self, chart_type, mock_fetch_and_cache_enrollments_data):
111
+ """
112
+ Test that EnterrpiseAdminCompletionsStatsView returns correct CSV data.
113
+ """
114
+ mock_fetch_and_cache_enrollments_data.return_value = enrollments_dataframe()
115
+ params = {
116
+ 'start_date': '2020-01-01',
117
+ 'end_date': '2025-08-09',
118
+ 'calculation': 'Running Total',
119
+ 'granularity': 'Daily',
120
+ 'response_type': 'csv',
121
+ 'chart_type': chart_type,
122
+ }
123
+ response = self.client.get(self.url, params)
124
+ assert response.status_code == status.HTTP_200_OK
125
+ assert response["Content-Type"] == "text/csv"
126
+ # verify the response content
127
+ assert response.content == COMPLETIONS_STATS_CSVS[chart_type]
128
+
129
+
130
+ @ddt.ddt
131
+ class TestCompletionstAPI(JWTTestMixin, APITransactionTestCase):
132
+ """Tests for EnterrpiseAdminCompletionsView."""
133
+
134
+ def setUp(self):
135
+ """
136
+ Setup method.
137
+ """
138
+ super().setUp()
139
+ self.user = UserFactory(is_staff=True)
140
+ role, __ = EnterpriseDataFeatureRole.objects.get_or_create(
141
+ name=ENTERPRISE_DATA_ADMIN_ROLE
142
+ )
143
+ self.role_assignment = EnterpriseDataRoleAssignment.objects.create(
144
+ role=role, user=self.user
145
+ )
146
+ self.client.force_authenticate(user=self.user)
147
+
148
+ self.enterprise_id = "ee5e6b3a-069a-4947-bb8d-d2dbc323396c"
149
+ self.set_jwt_cookie()
150
+
151
+ self.url = reverse(
152
+ "v1:enterprise-admin-analytics-completions",
153
+ kwargs={"enterprise_id": self.enterprise_id},
154
+ )
155
+
156
+ fetch_max_enrollment_datetime_patcher = patch(
157
+ 'enterprise_data.api.v1.views.enterprise_completions.fetch_max_enrollment_datetime',
158
+ return_value=datetime.now()
159
+ )
160
+
161
+ fetch_max_enrollment_datetime_patcher.start()
162
+ self.addCleanup(fetch_max_enrollment_datetime_patcher.stop)
163
+
164
+ def verify_enrollment_data(self, results, results_count):
165
+ """Verify the received enrollment data."""
166
+ attrs = [
167
+ "email",
168
+ "course_title",
169
+ "course_subject",
170
+ "passed_date",
171
+ ]
172
+
173
+ assert len(results) == results_count
174
+
175
+ filtered_data = []
176
+ for enrollment in ENROLLMENTS:
177
+ for result in results:
178
+ if enrollment["email"] == result["email"]:
179
+ filtered_data.append({attr: enrollment[attr] for attr in attrs})
180
+ break
181
+ received_data = sorted(results, key=lambda x: x["email"])
182
+ expected_data = sorted(filtered_data, key=lambda x: x["email"])
183
+ assert received_data == expected_data
184
+
185
+ @patch(
186
+ "enterprise_data.api.v1.views.enterprise_completions.fetch_and_cache_enrollments_data"
187
+ )
188
+ def test_get(self, mock_fetch_and_cache_enrollments_data):
189
+ """
190
+ Test that the GET method of EnterrpiseAdminCompletionsView works.
191
+ """
192
+ mock_fetch_and_cache_enrollments_data.return_value = enrollments_dataframe()
193
+
194
+ response = self.client.get(self.url, {"page": 1, "page_size": 1})
195
+ assert response.status_code == status.HTTP_200_OK
196
+ data = response.json()
197
+ assert data["next"] == f"http://testserver{self.url}?page=2&page_size=1"
198
+ assert data["previous"] is None
199
+ assert data["current_page"] == 1
200
+ assert data["num_pages"] == 2
201
+ assert data["count"] == 2
202
+ self.verify_enrollment_data(data["results"], 1)
@@ -108,6 +108,10 @@ class TestEnterpriseAdminAnalyticsSkillsView(JWTTestMixin, APITransactionTestCas
108
108
  return [
109
109
  list(item.values()) for item in get_dummy_skills_data(self.enterprise_id)
110
110
  ]
111
+ elif 'fact_enrollment_admin_dash' in query:
112
+ return [
113
+ list(item.values()) for item in get_dummy_enrollments_data(self.enterprise_id, 15)
114
+ ]
111
115
  else:
112
116
  return [
113
117
  list(item.values()) for item in get_dummy_skills_data(self.enterprise_id)