edx-enterprise-data 9.1.0__py3-none-any.whl → 9.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (25)
  1. {edx_enterprise_data-9.1.0.dist-info → edx_enterprise_data-9.2.0.dist-info}/METADATA +1 -1
  2. {edx_enterprise_data-9.1.0.dist-info → edx_enterprise_data-9.2.0.dist-info}/RECORD +22 -25
  3. enterprise_data/__init__.py +1 -1
  4. enterprise_data/admin_analytics/constants.py +3 -17
  5. enterprise_data/admin_analytics/data_loaders.py +0 -125
  6. enterprise_data/admin_analytics/database/queries/fact_engagement_admin_dash.py +85 -0
  7. enterprise_data/admin_analytics/database/tables/fact_engagement_admin_dash.py +50 -0
  8. enterprise_data/api/v1/serializers.py +1 -38
  9. enterprise_data/api/v1/urls.py +2 -2
  10. enterprise_data/api/v1/views/analytics_completions.py +0 -2
  11. enterprise_data/api/v1/views/analytics_engagements.py +0 -2
  12. enterprise_data/api/v1/views/analytics_enrollments.py +0 -2
  13. enterprise_data/api/v1/views/analytics_leaderboard.py +65 -102
  14. enterprise_data/api/v1/views/enterprise_learner.py +17 -14
  15. enterprise_data/renderers.py +2 -2
  16. enterprise_data/tests/admin_analytics/mock_analytics_data.py +15 -60
  17. enterprise_data/tests/admin_analytics/test_analytics_leaderboard.py +48 -81
  18. enterprise_data/tests/admin_analytics/test_data_loaders.py +1 -58
  19. enterprise_data/utils.py +0 -16
  20. enterprise_data/admin_analytics/utils.py +0 -180
  21. enterprise_data/api/v1/paginators.py +0 -121
  22. enterprise_data/tests/admin_analytics/test_utils.py +0 -102
  23. {edx_enterprise_data-9.1.0.dist-info → edx_enterprise_data-9.2.0.dist-info}/LICENSE +0 -0
  24. {edx_enterprise_data-9.1.0.dist-info → edx_enterprise_data-9.2.0.dist-info}/WHEEL +0 -0
  25. {edx_enterprise_data-9.1.0.dist-info → edx_enterprise_data-9.2.0.dist-info}/top_level.txt +0 -0
enterprise_data/api/v1/views/analytics_leaderboard.py
@@ -1,144 +1,107 @@
- """Advance Analytics for Leaderboard"""
+ """
+ Views for fetching leaderboard data.
+ """
+
  from datetime import datetime
  from logging import getLogger

- import numpy as np
- import pandas as pd
  from edx_rbac.decorators import permission_required
  from edx_rest_framework_extensions.auth.jwt.authentication import JwtAuthentication
- from rest_framework.views import APIView
+ from rest_framework.viewsets import ViewSet

  from django.http import StreamingHttpResponse

  from enterprise_data.admin_analytics.constants import ResponseType
- from enterprise_data.admin_analytics.utils import (
-     fetch_and_cache_engagements_data,
-     fetch_and_cache_enrollments_data,
-     fetch_engagements_cache_expiry_timestamp,
-     fetch_enrollments_cache_expiry_timestamp,
- )
- from enterprise_data.api.v1.paginators import AdvanceAnalyticsPagination
+ from enterprise_data.admin_analytics.database.tables import FactEngagementAdminDashTable, FactEnrollmentAdminDashTable
  from enterprise_data.api.v1.serializers import AdvanceAnalyticsQueryParamSerializer
+ from enterprise_data.api.v1.views.base import AnalyticsPaginationMixin
  from enterprise_data.renderers import LeaderboardCSVRenderer
- from enterprise_data.utils import date_filter

  LOGGER = getLogger(__name__)


- class AdvanceAnalyticsLeaderboardView(APIView):
+ class AdvanceAnalyticsLeaderboardView(AnalyticsPaginationMixin, ViewSet):
      """
-     API for getting the advance analytics leaderboard data.
+     View to handle requests for enterprise leaderboard data.
+
+     Here is the list of URLs that are handled by this view:
+     1. `enterprise_data_api_v1.enterprise-admin-analytics-leaderboard-list`: Get leaderboard data.
      """
      authentication_classes = (JwtAuthentication,)
-     pagination_class = AdvanceAnalyticsPagination
-     http_method_names = ['get']
+     http_method_names = ('get', )

      @permission_required('can_access_enterprise', fn=lambda request, enterprise_uuid: enterprise_uuid)
-     def get(self, request, enterprise_uuid):
-         """Get leaderboard data"""
+     def list(self, request, enterprise_uuid):
+         """
+         Get individual leaderboard data for the enterprise.
+         """
+         # Remove hyphens from the UUID
+         enterprise_uuid = enterprise_uuid.replace('-', '')
+
          serializer = AdvanceAnalyticsQueryParamSerializer(data=request.GET)
          serializer.is_valid(raise_exception=True)
-
-         enrollments_cache_expiry = fetch_enrollments_cache_expiry_timestamp()
-         enrollments_df = fetch_and_cache_enrollments_data(enterprise_uuid, enrollments_cache_expiry)
-
-         engagements_cache_expiry = fetch_engagements_cache_expiry_timestamp()
-         engagements_df = fetch_and_cache_engagements_data(enterprise_uuid, engagements_cache_expiry)
-
-         start_date = serializer.data.get('start_date', enrollments_df.enterprise_enrollment_date.min())
-         end_date = serializer.data.get('end_date', datetime.now())
-         response_type = serializer.data.get('response_type', ResponseType.JSON.value)
-
-         LOGGER.info(
-             "Leaderboard data requested for enterprise [%s] from [%s] to [%s]",
+         min_enrollment_date, _ = FactEnrollmentAdminDashTable().get_enrollment_date_range(
              enterprise_uuid,
-             start_date,
-             end_date,
-         )
-
-         # only include learners who have passed the course
-         enrollments_df = enrollments_df[enrollments_df["has_passed"] == 1]
-
-         # filter enrollments by date
-         enrollments_df = date_filter(start_date, end_date, enrollments_df, "passed_date")
-
-         completions = enrollments_df.groupby(["email"]).size().reset_index()
-         completions.columns = ["email", "course_completions"]
-
-         # filter engagements by date
-         engagements_df = date_filter(start_date, end_date, engagements_df, "activity_date")
-
-         engage = (
-             engagements_df.groupby(["email"])
-             .agg({"is_engaged": ["sum"], "learning_time_seconds": ["sum"]})
-             .reset_index()
-         )
-         engage.columns = ["email", "daily_sessions", "learning_time_seconds"]
-         engage["learning_time_hours"] = round(
-             engage["learning_time_seconds"].astype("float") / 60 / 60, 1
          )

-         # if daily_sessions is 0, set average_session_length to 0 becuase otherwise it will be `inf`
-         engage["average_session_length"] = np.where(
-             engage["daily_sessions"] == 0,
-             0,
-             round(engage["learning_time_hours"] / engage["daily_sessions"].astype("float"), 1)
+         # get values from query params or use default values
+         start_date = serializer.data.get('start_date', min_enrollment_date)
+         end_date = serializer.data.get('end_date', datetime.now())
+         page = serializer.data.get('page', 1)
+         page_size = serializer.data.get('page_size', 100)
+         leaderboard = FactEngagementAdminDashTable().get_all_leaderboard_data(
+             enterprise_customer_uuid=enterprise_uuid,
+             start_date=start_date,
+             end_date=end_date,
+             limit=page_size,
+             offset=(page - 1) * page_size,
          )
-
-         leaderboard_df = engage.merge(completions, on="email", how="left")
-         leaderboard_df = leaderboard_df.sort_values(
-             by=["learning_time_hours", "daily_sessions", "course_completions"],
-             ascending=[False, False, False],
+         total_count = FactEngagementAdminDashTable().get_leaderboard_data_count(
+             enterprise_customer_uuid=enterprise_uuid,
+             start_date=start_date,
+             end_date=end_date,
          )
-
-         # move the aggregated row with email 'null' to the end of the table
-         idx = leaderboard_df.index[leaderboard_df['email'] == 'null']
-         leaderboard_df.loc[idx, 'email'] = 'learners who have not shared consent'
-         leaderboard_df = pd.concat([leaderboard_df.drop(idx), leaderboard_df.loc[idx]])
-
-         # convert `nan` values to `None` because `nan` is not JSON serializable
-         leaderboard_df = leaderboard_df.replace(np.nan, None)
+         response_type = request.query_params.get('response_type', ResponseType.JSON.value)

          LOGGER.info(
-             "Leaderboard data prepared for enterprise [%s] from [%s] to [%s]",
+             'Leaderboard data requested for enterprise [%s] from [%s] to [%s]',
              enterprise_uuid,
              start_date,
              end_date,
          )

          if response_type == ResponseType.CSV.value:
-             filename = f"""Leaderboard, {start_date} - {end_date}.csv"""
-             leaderboard_df = leaderboard_df[
-                 [
-                     "email",
-                     "learning_time_hours",
-                     "daily_sessions",
-                     "average_session_length",
-                     "course_completions",
-                 ]
-             ]
+             filename = f'Leaderboard, {start_date} - {end_date}.csv'
+
              return StreamingHttpResponse(
-                 LeaderboardCSVRenderer().render(self._stream_serialized_data(leaderboard_df)),
-                 content_type="text/csv",
-                 headers={
-                     "Content-Disposition": f'attachment; filename="{filename}"',
-                     "Access-Control-Expose-Headers": "Content-Disposition"
-                 },
+                 LeaderboardCSVRenderer().render(self._stream_serialized_data(
+                     enterprise_uuid, start_date, end_date, total_count
+                 )),
+                 content_type='text/csv',
+                 headers={'Content-Disposition': f'attachment; filename="{filename}"'},
              )

-         paginator = self.pagination_class()
-         page = paginator.paginate_queryset(leaderboard_df, request)
-         serialized_data = page.data.to_dict(orient='records')
-         response = paginator.get_paginated_response(serialized_data)
-
-         return response
+         return self.get_paginated_response(
+             request=request,
+             records=leaderboard,
+             page=page,
+             page_size=page_size,
+             total_count=total_count,
+         )

-     def _stream_serialized_data(self, leaderboard_df, chunk_size=50000):
+     @staticmethod
+     def _stream_serialized_data(enterprise_uuid, start_date, end_date, total_count, page_size=50000):
          """
          Stream the serialized data.
          """
-         total_rows = leaderboard_df.shape[0]
-         for start_index in range(0, total_rows, chunk_size):
-             end_index = min(start_index + chunk_size, total_rows)
-             chunk = leaderboard_df.iloc[start_index:end_index]
-             yield from chunk.to_dict(orient='records')
+         offset = 0
+         while offset < total_count:
+             leaderboard = FactEngagementAdminDashTable().get_all_leaderboard_data(
+                 enterprise_customer_uuid=enterprise_uuid,
+                 start_date=start_date,
+                 end_date=end_date,
+                 limit=page_size,
+                 offset=offset,
+             )
+             yield from leaderboard
+             offset += page_size
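
For reference, the rewritten view keeps the same query-parameter-driven flow (start_date, end_date, page, page_size, response_type) but reads from the analytics database tables instead of cached dataframes. A minimal client sketch follows; the host, URL path, enterprise UUID, and JWT value are placeholders for illustration only, not values taken from this diff:

import requests

HOST = "https://analytics.example.com"  # placeholder deployment host
ENTERPRISE_UUID = "00000000-0000-0000-0000-000000000000"  # placeholder UUID
URL = f"{HOST}/enterprise/api/v1/admin/analytics/{ENTERPRISE_UUID}/leaderboard/"  # illustrative path

# Query params mirror the fields the view reads from AdvanceAnalyticsQueryParamSerializer:
# start_date/end_date bound the leaderboard window, page/page_size drive offset paging,
# and response_type switches between paginated JSON and a streamed CSV attachment.
params = {
    "start_date": "2024-01-01",
    "end_date": "2024-06-30",
    "page": 1,
    "page_size": 100,
    "response_type": "json",
}
resp = requests.get(
    URL,
    params=params,
    headers={"Authorization": "JWT <jwt-token>"},  # placeholder credential
    timeout=30,
)
resp.raise_for_status()
data = resp.json()
# The updated tests assert these pagination keys on the JSON payload; the leaderboard
# records themselves ride along in the same paginated body.
print(data["count"], data["current_page"], data["num_pages"])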
enterprise_data/api/v1/views/enterprise_learner.py
@@ -6,7 +6,6 @@ from datetime import date, timedelta
  from logging import getLogger
  from uuid import UUID

- from edx_django_utils.cache import TieredCache
  from rest_framework import filters, viewsets
  from rest_framework.decorators import action
  from rest_framework.response import Response
@@ -24,7 +23,7 @@ from enterprise_data.filters import AuditEnrollmentsFilterBackend, AuditUsersEnr
  from enterprise_data.models import EnterpriseLearner, EnterpriseLearnerEnrollment
  from enterprise_data.paginators import EnterpriseEnrollmentsPagination
  from enterprise_data.renderers import EnrollmentsCSVRenderer
- from enterprise_data.utils import get_cache_key, subtract_one_month
+ from enterprise_data.utils import subtract_one_month

  from .base import EnterpriseViewSetMixin

@@ -81,18 +80,22 @@ class EnterpriseLearnerEnrollmentViewSet(EnterpriseViewSetMixin, viewsets.ReadOn
              return EnterpriseLearnerEnrollment.objects.none()

          enterprise_customer_uuid = self.kwargs['enterprise_id']
-         cache_key = get_cache_key(
-             resource='enterprise-learner',
-             enterprise_customer=enterprise_customer_uuid,
-         )
-         cached_response = TieredCache.get_cached_response(cache_key)
-         if cached_response.is_found:
-             return cached_response.value
-         else:
-             enrollments = EnterpriseLearnerEnrollment.objects.filter(enterprise_customer_uuid=enterprise_customer_uuid)
-             enrollments = self.apply_filters(enrollments)
-             TieredCache.set_all_tiers(cache_key, enrollments, DEFAULT_LEARNER_CACHE_TIMEOUT)
-             return enrollments
+
+         # TODO: Created a ticket ENT0-9531 to fix the cache issue
+         # Reason for Comenting cache: Remove the cache for this ViewSet
+         # becuae the cache is not working as expected
+         # cache_key = get_cache_key(
+         #     resource='enterprise-learner',
+         #     enterprise_customer=enterprise_customer_uuid,
+         # )
+         # cached_response = TieredCache.get_cached_response(cache_key)
+         # if cached_response.is_found:
+         #     return cached_response.value
+         # else:
+         enrollments = EnterpriseLearnerEnrollment.objects.filter(enterprise_customer_uuid=enterprise_customer_uuid)
+         enrollments = self.apply_filters(enrollments)
+         # TieredCache.set_all_tiers(cache_key, enrollments, DEFAULT_LEARNER_CACHE_TIMEOUT)
+         return enrollments

      def list(self, request, *args, **kwargs):
          """
enterprise_data/renderers.py
@@ -82,7 +82,7 @@ class LeaderboardCSVRenderer(CSVStreamingRenderer):
      header = [
          'email',
          'learning_time_hours',
-         'daily_sessions',
+         'session_count',
          'average_session_length',
-         'course_completions',
+         'course_completion_count',
      ]
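
The renderer change renames two CSV columns (daily_sessions becomes session_count, course_completions becomes course_completion_count). An illustrative sketch with the standard csv module (not the renderer implementation itself), showing how a leaderboard record maps onto the new header; keys missing from a record render as empty cells, matching the rows asserted in the updated tests:

import csv
import io

# Column order copied from LeaderboardCSVRenderer.header in this diff.
HEADER = [
    "email",
    "learning_time_hours",
    "session_count",
    "average_session_length",
    "course_completion_count",
]

# Sample record shaped like the mock leaderboard data used in the tests.
record = {
    "email": "paul77@example.org",
    "learning_time_hours": 4.4,
    "average_session_length": 4.4,
}

buffer = io.StringIO()
writer = csv.DictWriter(buffer, fieldnames=HEADER, restval="")
writer.writeheader()
writer.writerow(record)
print(buffer.getvalue())
# email,learning_time_hours,session_count,average_session_length,course_completion_count
# paul77@example.org,4.4,,4.4,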
enterprise_data/tests/admin_analytics/mock_analytics_data.py
@@ -1,6 +1,6 @@
- """Mock data for enrollments"""
-
- import pandas as pd
+ """
+ Mock data for admin analytics tests.
+ """

  ENROLLMENTS = [
      {
@@ -328,132 +328,87 @@ ENGAGEMENTS = [
  ]


- def enrollments_dataframe():
-     """Return a DataFrame of enrollments."""
-     enrollments = pd.DataFrame(ENROLLMENTS)
-
-     enrollments['enterprise_enrollment_date'] = enrollments['enterprise_enrollment_date'].astype('datetime64[ns]')
-     enrollments['date_certificate_awarded'] = enrollments['date_certificate_awarded'].astype('datetime64[ns]')
-     enrollments['date_certificate_created_raw'] = enrollments['date_certificate_created_raw'].astype('datetime64[ns]')
-     enrollments['passed_date_raw'] = enrollments['passed_date_raw'].astype('datetime64[ns]')
-     enrollments['passed_date'] = enrollments['passed_date'].astype('datetime64[ns]')
-
-     return enrollments
-
-
- def engagements_dataframe():
-     """Return a DataFrame of engagements."""
-     engagements = pd.DataFrame(ENGAGEMENTS)
-     engagements['activity_date'] = engagements['activity_date'].astype('datetime64[ns]')
-     return engagements
-
-
- def leaderboard_csv_content():
-     """Return the CSV content of leaderboard."""
-     return (
-         b'email,learning_time_hours,daily_sessions,average_session_length,course_completions\r\n'
-         b'paul77@example.org,4.4,1,4.4,\r\nseth57@example.org,2.7,1,2.7,\r\n'
-         b'weaverpatricia@example.net,2.6,1,2.6,\r\nwebertodd@example.com,1.5,1,1.5,\r\n'
-         b'yferguson@example.net,1.3,1,1.3,\r\nyallison@example.org,1.2,1,1.2,\r\n'
-         b'padillamichelle@example.org,1.0,1,1.0,\r\ncaseyjohnny@example.com,0.0,0,0.0,\r\n'
-         b'crystal86@example.net,0.0,0,0.0,\r\ngraceperez@example.com,0.0,0,0.0,\r\n'
-         b'mackwilliam@example.com,0.0,0,0.0,\r\nsamanthaclarke@example.org,0.0,0,0.0,\r\n'
-     )
-
-
  LEADERBOARD_RESPONSE = [
      {
          "email": "paul77@example.org",
-         "daily_sessions": 1,
-         "learning_time_seconds": 15753,
+         "sessions": 1,
          "learning_time_hours": 4.4,
          "average_session_length": 4.4,
          "course_completions": None,
      },
      {
          "email": "seth57@example.org",
-         "daily_sessions": 1,
-         "learning_time_seconds": 9898,
+         "sessions": 1,
          "learning_time_hours": 2.7,
          "average_session_length": 2.7,
          "course_completions": None,
      },
      {
          "email": "weaverpatricia@example.net",
-         "daily_sessions": 1,
-         "learning_time_seconds": 9441,
+         "sessions": 1,
          "learning_time_hours": 2.6,
          "average_session_length": 2.6,
          "course_completions": None,
      },
      {
          "email": "webertodd@example.com",
-         "daily_sessions": 1,
-         "learning_time_seconds": 5285,
+         "sessions": 1,
          "learning_time_hours": 1.5,
          "average_session_length": 1.5,
          "course_completions": None,
      },
      {
          "email": "yferguson@example.net",
-         "daily_sessions": 1,
-         "learning_time_seconds": 4747,
+         "sessions": 1,
          "learning_time_hours": 1.3,
          "average_session_length": 1.3,
          "course_completions": None,
      },
      {
          "email": "yallison@example.org",
-         "daily_sessions": 1,
-         "learning_time_seconds": 4335,
+         "sessions": 1,
          "learning_time_hours": 1.2,
          "average_session_length": 1.2,
          "course_completions": None,
      },
      {
          "email": "padillamichelle@example.org",
-         "daily_sessions": 1,
-         "learning_time_seconds": 3724,
+         "sessions": 1,
          "learning_time_hours": 1.0,
          "average_session_length": 1.0,
          "course_completions": None,
      },
      {
          "email": "caseyjohnny@example.com",
-         "daily_sessions": 0,
-         "learning_time_seconds": 0,
+         "sessions": 0,
          "learning_time_hours": 0.0,
          "average_session_length": 0.0,
          "course_completions": None,
      },
      {
          "email": "crystal86@example.net",
-         "daily_sessions": 0,
-         "learning_time_seconds": 0,
+         "sessions": 0,
          "learning_time_hours": 0.0,
          "average_session_length": 0.0,
          "course_completions": None,
      },
      {
          "email": "graceperez@example.com",
-         "daily_sessions": 0,
-         "learning_time_seconds": 21,
+         "sessions": 0,
          "learning_time_hours": 0.0,
          "average_session_length": 0.0,
          "course_completions": None,
      },
      {
          "email": "mackwilliam@example.com",
-         "daily_sessions": 0,
-         "learning_time_seconds": 0,
+         "sessions": 0,
          "learning_time_hours": 0.0,
          "average_session_length": 0.0,
          "course_completions": None,
      },
      {
          "email": "samanthaclarke@example.org",
-         "daily_sessions": 0,
-         "learning_time_seconds": 29,
+         "sessions": 0,
          "learning_time_hours": 0.0,
          "average_session_length": 0.0,
          "course_completions": None,
enterprise_data/tests/admin_analytics/test_analytics_leaderboard.py
@@ -1,4 +1,6 @@
- """Unittests for analytics_enrollments.py"""
+ """
+ Tests for leaderboard API.
+ """

  from datetime import datetime

@@ -9,13 +11,7 @@ from rest_framework.reverse import reverse
  from rest_framework.test import APITransactionTestCase

  from enterprise_data.admin_analytics.constants import ResponseType
- from enterprise_data.tests.admin_analytics.mock_analytics_data import (
-     ENROLLMENTS,
-     LEADERBOARD_RESPONSE,
-     engagements_dataframe,
-     enrollments_dataframe,
-     leaderboard_csv_content,
- )
+ from enterprise_data.tests.admin_analytics.mock_analytics_data import ENROLLMENTS, LEADERBOARD_RESPONSE
  from enterprise_data.tests.mixins import JWTTestMixin
  from enterprise_data.tests.test_utils import UserFactory
  from enterprise_data_roles.constants import ENTERPRISE_DATA_ADMIN_ROLE
@@ -44,25 +40,16 @@ class TestLeaderboardAPI(JWTTestMixin, APITransactionTestCase):
          self.set_jwt_cookie()

          self.url = reverse(
-             "v1:enterprise-admin-analytics-leaderboard",
+             "v1:enterprise-admin-analytics-leaderboard-list",
              kwargs={"enterprise_uuid": self.enterprise_uuid},
          )
-
-         fetch_max_enrollment_datetime_patcher = patch(
-             'enterprise_data.api.v1.views.analytics_leaderboard.fetch_enrollments_cache_expiry_timestamp',
-             return_value=datetime.now()
+         get_enrollment_date_range_patcher = patch(
+             'enterprise_data.api.v1.views.analytics_enrollments.FactEnrollmentAdminDashTable.get_enrollment_date_range',
+             return_value=(datetime.now(), datetime.now())
          )

-         fetch_max_enrollment_datetime_patcher.start()
-         self.addCleanup(fetch_max_enrollment_datetime_patcher.stop)
-
-         fetch_max_engagement_datetime_patcher = patch(
-             'enterprise_data.api.v1.views.analytics_leaderboard.fetch_engagements_cache_expiry_timestamp',
-             return_value=datetime.now()
-         )
-
-         fetch_max_engagement_datetime_patcher.start()
-         self.addCleanup(fetch_max_engagement_datetime_patcher.stop)
+         get_enrollment_date_range_patcher.start()
+         self.addCleanup(get_enrollment_date_range_patcher.stop)

      def verify_enrollment_data(self, results, results_count):
          """Verify the received enrollment data."""
@@ -87,77 +74,57 @@ class TestLeaderboardAPI(JWTTestMixin, APITransactionTestCase):
          expected_data = sorted(filtered_data, key=lambda x: x["email"])
          assert received_data == expected_data

-     @patch(
-         "enterprise_data.api.v1.views.analytics_leaderboard.fetch_and_cache_enrollments_data"
-     )
-     @patch(
-         "enterprise_data.api.v1.views.analytics_leaderboard.fetch_and_cache_engagements_data"
-     )
-     def test_get(self, mock_fetch_and_cache_engagements_data, mock_fetch_and_cache_enrollments_data):
+     @patch('enterprise_data.admin_analytics.database.tables.FactEngagementAdminDashTable.get_leaderboard_data_count')
+     @patch('enterprise_data.admin_analytics.database.tables.FactEngagementAdminDashTable.get_all_leaderboard_data')
+     def test_get(self, mock_get_all_leaderboard_data, mock_get_leaderboard_data_count):
          """
          Test the GET method for the AdvanceAnalyticsLeaderboardView works.
          """
-         mock_fetch_and_cache_enrollments_data.return_value = enrollments_dataframe()
-         mock_fetch_and_cache_engagements_data.return_value = engagements_dataframe()
-
-         response = self.client.get(self.url, {"page_size": 2})
+         mock_get_all_leaderboard_data.return_value = LEADERBOARD_RESPONSE
+         mock_get_leaderboard_data_count.return_value = len(LEADERBOARD_RESPONSE)
+         response = self.client.get(self.url + '?page_size=2')
          assert response.status_code == status.HTTP_200_OK
-         assert response["Content-Type"] == "application/json"
+         assert response['Content-Type'] == 'application/json'
          data = response.json()
-         assert data["next"] == f'http://testserver{self.url}?page=2&page_size=2'
-         assert data["previous"] is None
-         assert data["current_page"] == 1
-         assert data["num_pages"] == 6
-         assert data["count"] == 12
-         assert data["results"] == [
-             {
-                 "email": "paul77@example.org",
-                 "daily_sessions": 1,
-                 "learning_time_seconds": 15753,
-                 "learning_time_hours": 4.4,
-                 "average_session_length": 4.4,
-                 "course_completions": None,
-             },
-             {
-                 "email": "seth57@example.org",
-                 "daily_sessions": 1,
-                 "learning_time_seconds": 9898,
-                 "learning_time_hours": 2.7,
-                 "average_session_length": 2.7,
-                 "course_completions": None,
-             },
-         ]
+         assert data['next'] == f'http://testserver{self.url}?page=2&page_size=2'
+         assert data['previous'] is None
+         assert data['current_page'] == 1
+         assert data['num_pages'] == 6
+         assert data['count'] == 12

          # fetch all records
-         response = self.client.get(self.url, {"page_size": 20})
+         response = self.client.get(self.url + '?page_size=20')
          assert response.status_code == status.HTTP_200_OK
          data = response.json()
-         assert data["next"] is None
-         assert data["previous"] is None
-         assert data["current_page"] == 1
-         assert data["num_pages"] == 1
-         assert data["count"] == 12
-         assert data["results"] == LEADERBOARD_RESPONSE
-
-     @patch(
-         "enterprise_data.api.v1.views.analytics_leaderboard.fetch_and_cache_enrollments_data"
-     )
-     @patch(
-         "enterprise_data.api.v1.views.analytics_leaderboard.fetch_and_cache_engagements_data"
-     )
-     def test_get_csv(self, mock_fetch_and_cache_engagements_data, mock_fetch_and_cache_enrollments_data):
+         assert data['next'] is None
+         assert data['previous'] is None
+         assert data['current_page'] == 1
+         assert data['num_pages'] == 1
+         assert data['count'] == 12
+
+     @patch('enterprise_data.admin_analytics.database.tables.FactEngagementAdminDashTable.get_leaderboard_data_count')
+     @patch('enterprise_data.admin_analytics.database.tables.FactEngagementAdminDashTable.get_all_leaderboard_data')
+     def test_get_csv(self, mock_get_all_leaderboard_data, mock_get_leaderboard_data_count):
          """
          Test the GET method for the AdvanceAnalyticsIndividualEnrollmentsView return correct CSV data.
          """
-         mock_fetch_and_cache_enrollments_data.return_value = enrollments_dataframe()
-         mock_fetch_and_cache_engagements_data.return_value = engagements_dataframe()
-
-         response = self.client.get(self.url, {"response_type": ResponseType.CSV.value})
+         mock_get_all_leaderboard_data.return_value = LEADERBOARD_RESPONSE[:5]
+         mock_get_leaderboard_data_count.return_value = 5
+         response = self.client.get(self.url, {'response_type': ResponseType.CSV.value})
          assert response.status_code == status.HTTP_200_OK

          # verify the response headers
-         assert response["Content-Type"] == "text/csv"
+         assert response['Content-Type'] == 'text/csv'
+
+         content = b"".join(response.streaming_content).decode().splitlines()
+         assert len(content) == 6
+
+         # Verify CSV header.
+         assert 'email,learning_time_hours,session_count,average_session_length,course_completion_count' == content[0]

-         # verify the response content
-         content = b"".join(response.streaming_content)
-         assert content == leaderboard_csv_content()
+         # verify the content
+         assert 'paul77@example.org,4.4,,4.4,' in content
+         assert 'seth57@example.org,2.7,,2.7,' in content
+         assert 'weaverpatricia@example.net,2.6,,2.6,' in content
+         assert 'webertodd@example.com,1.5,,1.5,' in content
+         assert 'yferguson@example.net,1.3,,1.3,' in content
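
Because the CSV branch returns a StreamingHttpResponse fed by _stream_serialized_data in page_size batches rather than a single materialized table, a client can consume the export incrementally. A minimal sketch, reusing the same placeholder host, path, and credential as in the earlier example:

import requests

HOST = "https://analytics.example.com"  # placeholder deployment host
ENTERPRISE_UUID = "00000000-0000-0000-0000-000000000000"  # placeholder UUID
URL = f"{HOST}/enterprise/api/v1/admin/analytics/{ENTERPRISE_UUID}/leaderboard/"  # illustrative path

with requests.get(
    URL,
    params={"response_type": "csv", "start_date": "2024-01-01", "end_date": "2024-06-30"},
    headers={"Authorization": "JWT <jwt-token>"},  # placeholder credential
    stream=True,
    timeout=30,
) as resp:
    resp.raise_for_status()
    # The first line is the header from LeaderboardCSVRenderer; subsequent lines are
    # leaderboard rows, generated server-side one batch of page_size records at a time.
    for line in resp.iter_lines(decode_unicode=True):
        print(line)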