geek-cafe-saas-sdk 0.6.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of geek-cafe-saas-sdk might be problematic. Click here for more details.
- geek_cafe_saas_sdk/__init__.py +9 -0
- geek_cafe_saas_sdk/core/__init__.py +11 -0
- geek_cafe_saas_sdk/core/audit_mixin.py +33 -0
- geek_cafe_saas_sdk/core/error_codes.py +132 -0
- geek_cafe_saas_sdk/core/service_errors.py +19 -0
- geek_cafe_saas_sdk/core/service_result.py +121 -0
- geek_cafe_saas_sdk/decorators/__init__.py +64 -0
- geek_cafe_saas_sdk/decorators/auth.py +373 -0
- geek_cafe_saas_sdk/decorators/core.py +358 -0
- geek_cafe_saas_sdk/domains/__init__.py +0 -0
- geek_cafe_saas_sdk/domains/analytics/__init__.py +0 -0
- geek_cafe_saas_sdk/domains/analytics/handlers/__init__.py +0 -0
- geek_cafe_saas_sdk/domains/analytics/models/__init__.py +9 -0
- geek_cafe_saas_sdk/domains/analytics/models/website_analytics.py +219 -0
- geek_cafe_saas_sdk/domains/analytics/models/website_analytics_summary.py +220 -0
- geek_cafe_saas_sdk/domains/analytics/services/__init__.py +11 -0
- geek_cafe_saas_sdk/domains/analytics/services/website_analytics_service.py +232 -0
- geek_cafe_saas_sdk/domains/analytics/services/website_analytics_summary_service.py +212 -0
- geek_cafe_saas_sdk/domains/analytics/services/website_analytics_tally_service.py +610 -0
- geek_cafe_saas_sdk/domains/auth/__init__.py +0 -0
- geek_cafe_saas_sdk/domains/auth/handlers/__init__.py +0 -0
- geek_cafe_saas_sdk/domains/auth/handlers/users/create/app.py +41 -0
- geek_cafe_saas_sdk/domains/auth/handlers/users/delete/app.py +41 -0
- geek_cafe_saas_sdk/domains/auth/handlers/users/get/app.py +39 -0
- geek_cafe_saas_sdk/domains/auth/handlers/users/list/app.py +36 -0
- geek_cafe_saas_sdk/domains/auth/handlers/users/update/app.py +44 -0
- geek_cafe_saas_sdk/domains/auth/models/__init__.py +13 -0
- geek_cafe_saas_sdk/domains/auth/models/permission.py +134 -0
- geek_cafe_saas_sdk/domains/auth/models/resource_permission.py +245 -0
- geek_cafe_saas_sdk/domains/auth/models/role.py +213 -0
- geek_cafe_saas_sdk/domains/auth/models/user.py +285 -0
- geek_cafe_saas_sdk/domains/auth/services/__init__.py +16 -0
- geek_cafe_saas_sdk/domains/auth/services/authorization_service.py +376 -0
- geek_cafe_saas_sdk/domains/auth/services/permission_registry.py +464 -0
- geek_cafe_saas_sdk/domains/auth/services/resource_permission_service.py +408 -0
- geek_cafe_saas_sdk/domains/auth/services/user_service.py +274 -0
- geek_cafe_saas_sdk/domains/communities/__init__.py +0 -0
- geek_cafe_saas_sdk/domains/communities/handlers/__init__.py +0 -0
- geek_cafe_saas_sdk/domains/communities/handlers/communities/create/app.py +41 -0
- geek_cafe_saas_sdk/domains/communities/handlers/communities/delete/app.py +41 -0
- geek_cafe_saas_sdk/domains/communities/handlers/communities/get/app.py +39 -0
- geek_cafe_saas_sdk/domains/communities/handlers/communities/list/app.py +36 -0
- geek_cafe_saas_sdk/domains/communities/handlers/communities/update/app.py +44 -0
- geek_cafe_saas_sdk/domains/communities/models/__init__.py +6 -0
- geek_cafe_saas_sdk/domains/communities/models/community.py +326 -0
- geek_cafe_saas_sdk/domains/communities/models/community_member.py +227 -0
- geek_cafe_saas_sdk/domains/communities/services/__init__.py +6 -0
- geek_cafe_saas_sdk/domains/communities/services/community_member_service.py +412 -0
- geek_cafe_saas_sdk/domains/communities/services/community_service.py +479 -0
- geek_cafe_saas_sdk/domains/events/__init__.py +0 -0
- geek_cafe_saas_sdk/domains/events/handlers/__init__.py +0 -0
- geek_cafe_saas_sdk/domains/events/handlers/attendees/app.py +67 -0
- geek_cafe_saas_sdk/domains/events/handlers/cancel/app.py +66 -0
- geek_cafe_saas_sdk/domains/events/handlers/check_in/app.py +60 -0
- geek_cafe_saas_sdk/domains/events/handlers/create/app.py +93 -0
- geek_cafe_saas_sdk/domains/events/handlers/delete/app.py +42 -0
- geek_cafe_saas_sdk/domains/events/handlers/get/app.py +39 -0
- geek_cafe_saas_sdk/domains/events/handlers/invite/app.py +98 -0
- geek_cafe_saas_sdk/domains/events/handlers/list/app.py +125 -0
- geek_cafe_saas_sdk/domains/events/handlers/publish/app.py +49 -0
- geek_cafe_saas_sdk/domains/events/handlers/rsvp/app.py +83 -0
- geek_cafe_saas_sdk/domains/events/handlers/update/app.py +44 -0
- geek_cafe_saas_sdk/domains/events/models/__init__.py +3 -0
- geek_cafe_saas_sdk/domains/events/models/event.py +681 -0
- geek_cafe_saas_sdk/domains/events/models/event_attendee.py +324 -0
- geek_cafe_saas_sdk/domains/events/services/__init__.py +9 -0
- geek_cafe_saas_sdk/domains/events/services/event_attendee_service.py +571 -0
- geek_cafe_saas_sdk/domains/events/services/event_service.py +684 -0
- geek_cafe_saas_sdk/domains/files/__init__.py +0 -0
- geek_cafe_saas_sdk/domains/files/models/__init__.py +0 -0
- geek_cafe_saas_sdk/domains/files/models/directory.py +258 -0
- geek_cafe_saas_sdk/domains/files/models/file.py +312 -0
- geek_cafe_saas_sdk/domains/files/models/file_share.py +268 -0
- geek_cafe_saas_sdk/domains/files/models/file_version.py +216 -0
- geek_cafe_saas_sdk/domains/files/services/__init__.py +0 -0
- geek_cafe_saas_sdk/domains/files/services/directory_service.py +701 -0
- geek_cafe_saas_sdk/domains/files/services/file_share_service.py +663 -0
- geek_cafe_saas_sdk/domains/files/services/file_system_service.py +575 -0
- geek_cafe_saas_sdk/domains/files/services/file_version_service.py +739 -0
- geek_cafe_saas_sdk/domains/files/services/s3_file_service.py +501 -0
- geek_cafe_saas_sdk/domains/messaging/__init__.py +0 -0
- geek_cafe_saas_sdk/domains/messaging/handlers/__init__.py +0 -0
- geek_cafe_saas_sdk/domains/messaging/handlers/chat_channels/create/app.py +86 -0
- geek_cafe_saas_sdk/domains/messaging/handlers/chat_channels/delete/app.py +65 -0
- geek_cafe_saas_sdk/domains/messaging/handlers/chat_channels/get/app.py +64 -0
- geek_cafe_saas_sdk/domains/messaging/handlers/chat_channels/list/app.py +97 -0
- geek_cafe_saas_sdk/domains/messaging/handlers/chat_channels/update/app.py +149 -0
- geek_cafe_saas_sdk/domains/messaging/handlers/chat_messages/create/app.py +67 -0
- geek_cafe_saas_sdk/domains/messaging/handlers/chat_messages/delete/app.py +65 -0
- geek_cafe_saas_sdk/domains/messaging/handlers/chat_messages/get/app.py +64 -0
- geek_cafe_saas_sdk/domains/messaging/handlers/chat_messages/list/app.py +102 -0
- geek_cafe_saas_sdk/domains/messaging/handlers/chat_messages/update/app.py +127 -0
- geek_cafe_saas_sdk/domains/messaging/handlers/contact_threads/create/app.py +94 -0
- geek_cafe_saas_sdk/domains/messaging/handlers/contact_threads/delete/app.py +66 -0
- geek_cafe_saas_sdk/domains/messaging/handlers/contact_threads/get/app.py +67 -0
- geek_cafe_saas_sdk/domains/messaging/handlers/contact_threads/list/app.py +95 -0
- geek_cafe_saas_sdk/domains/messaging/handlers/contact_threads/update/app.py +156 -0
- geek_cafe_saas_sdk/domains/messaging/models/__init__.py +13 -0
- geek_cafe_saas_sdk/domains/messaging/models/chat_channel.py +337 -0
- geek_cafe_saas_sdk/domains/messaging/models/chat_channel_member.py +180 -0
- geek_cafe_saas_sdk/domains/messaging/models/chat_message.py +426 -0
- geek_cafe_saas_sdk/domains/messaging/models/contact_thread.py +392 -0
- geek_cafe_saas_sdk/domains/messaging/services/__init__.py +11 -0
- geek_cafe_saas_sdk/domains/messaging/services/chat_channel_service.py +700 -0
- geek_cafe_saas_sdk/domains/messaging/services/chat_message_service.py +491 -0
- geek_cafe_saas_sdk/domains/messaging/services/contact_thread_service.py +497 -0
- geek_cafe_saas_sdk/domains/tenancy/__init__.py +0 -0
- geek_cafe_saas_sdk/domains/tenancy/handlers/__init__.py +0 -0
- geek_cafe_saas_sdk/domains/tenancy/handlers/subscriptions/activate/app.py +52 -0
- geek_cafe_saas_sdk/domains/tenancy/handlers/subscriptions/active/app.py +37 -0
- geek_cafe_saas_sdk/domains/tenancy/handlers/subscriptions/cancel/app.py +55 -0
- geek_cafe_saas_sdk/domains/tenancy/handlers/subscriptions/get/app.py +39 -0
- geek_cafe_saas_sdk/domains/tenancy/handlers/subscriptions/list/app.py +44 -0
- geek_cafe_saas_sdk/domains/tenancy/handlers/subscriptions/record_payment/app.py +56 -0
- geek_cafe_saas_sdk/domains/tenancy/handlers/tenants/get/app.py +39 -0
- geek_cafe_saas_sdk/domains/tenancy/handlers/tenants/me/app.py +37 -0
- geek_cafe_saas_sdk/domains/tenancy/handlers/tenants/signup/app.py +61 -0
- geek_cafe_saas_sdk/domains/tenancy/handlers/tenants/update/app.py +44 -0
- geek_cafe_saas_sdk/domains/tenancy/models/__init__.py +6 -0
- geek_cafe_saas_sdk/domains/tenancy/models/subscription.py +440 -0
- geek_cafe_saas_sdk/domains/tenancy/models/tenant.py +258 -0
- geek_cafe_saas_sdk/domains/tenancy/services/__init__.py +6 -0
- geek_cafe_saas_sdk/domains/tenancy/services/subscription_service.py +557 -0
- geek_cafe_saas_sdk/domains/tenancy/services/tenant_service.py +575 -0
- geek_cafe_saas_sdk/domains/voting/__init__.py +0 -0
- geek_cafe_saas_sdk/domains/voting/handlers/__init__.py +0 -0
- geek_cafe_saas_sdk/domains/voting/handlers/votes/create/app.py +128 -0
- geek_cafe_saas_sdk/domains/voting/handlers/votes/delete/app.py +41 -0
- geek_cafe_saas_sdk/domains/voting/handlers/votes/get/app.py +39 -0
- geek_cafe_saas_sdk/domains/voting/handlers/votes/list/app.py +38 -0
- geek_cafe_saas_sdk/domains/voting/handlers/votes/summerize/README.md +3 -0
- geek_cafe_saas_sdk/domains/voting/handlers/votes/update/app.py +44 -0
- geek_cafe_saas_sdk/domains/voting/models/__init__.py +9 -0
- geek_cafe_saas_sdk/domains/voting/models/vote.py +231 -0
- geek_cafe_saas_sdk/domains/voting/models/vote_summary.py +193 -0
- geek_cafe_saas_sdk/domains/voting/services/__init__.py +11 -0
- geek_cafe_saas_sdk/domains/voting/services/vote_service.py +264 -0
- geek_cafe_saas_sdk/domains/voting/services/vote_summary_service.py +198 -0
- geek_cafe_saas_sdk/domains/voting/services/vote_tally_service.py +533 -0
- geek_cafe_saas_sdk/lambda_handlers/README.md +404 -0
- geek_cafe_saas_sdk/lambda_handlers/__init__.py +67 -0
- geek_cafe_saas_sdk/lambda_handlers/_base/__init__.py +25 -0
- geek_cafe_saas_sdk/lambda_handlers/_base/api_key_handler.py +129 -0
- geek_cafe_saas_sdk/lambda_handlers/_base/authorized_secure_handler.py +218 -0
- geek_cafe_saas_sdk/lambda_handlers/_base/base_handler.py +185 -0
- geek_cafe_saas_sdk/lambda_handlers/_base/handler_factory.py +256 -0
- geek_cafe_saas_sdk/lambda_handlers/_base/public_handler.py +53 -0
- geek_cafe_saas_sdk/lambda_handlers/_base/secure_handler.py +89 -0
- geek_cafe_saas_sdk/lambda_handlers/_base/service_pool.py +94 -0
- geek_cafe_saas_sdk/lambda_handlers/directories/create/app.py +79 -0
- geek_cafe_saas_sdk/lambda_handlers/directories/delete/app.py +76 -0
- geek_cafe_saas_sdk/lambda_handlers/directories/get/app.py +74 -0
- geek_cafe_saas_sdk/lambda_handlers/directories/list/app.py +75 -0
- geek_cafe_saas_sdk/lambda_handlers/directories/move/app.py +79 -0
- geek_cafe_saas_sdk/lambda_handlers/files/delete/app.py +121 -0
- geek_cafe_saas_sdk/lambda_handlers/files/download/app.py +187 -0
- geek_cafe_saas_sdk/lambda_handlers/files/get/app.py +127 -0
- geek_cafe_saas_sdk/lambda_handlers/files/list/app.py +108 -0
- geek_cafe_saas_sdk/lambda_handlers/files/share/app.py +83 -0
- geek_cafe_saas_sdk/lambda_handlers/files/shares/list/app.py +84 -0
- geek_cafe_saas_sdk/lambda_handlers/files/shares/revoke/app.py +76 -0
- geek_cafe_saas_sdk/lambda_handlers/files/update/app.py +143 -0
- geek_cafe_saas_sdk/lambda_handlers/files/upload/app.py +151 -0
- geek_cafe_saas_sdk/middleware/__init__.py +36 -0
- geek_cafe_saas_sdk/middleware/auth.py +85 -0
- geek_cafe_saas_sdk/middleware/authorization.py +523 -0
- geek_cafe_saas_sdk/middleware/cors.py +63 -0
- geek_cafe_saas_sdk/middleware/error_handling.py +114 -0
- geek_cafe_saas_sdk/middleware/validation.py +80 -0
- geek_cafe_saas_sdk/models/__init__.py +20 -0
- geek_cafe_saas_sdk/models/base_model.py +233 -0
- geek_cafe_saas_sdk/services/__init__.py +18 -0
- geek_cafe_saas_sdk/services/database_service.py +441 -0
- geek_cafe_saas_sdk/utilities/__init__.py +88 -0
- geek_cafe_saas_sdk/utilities/cognito_utility.py +568 -0
- geek_cafe_saas_sdk/utilities/custom_exceptions.py +183 -0
- geek_cafe_saas_sdk/utilities/datetime_utility.py +410 -0
- geek_cafe_saas_sdk/utilities/dictionary_utility.py +78 -0
- geek_cafe_saas_sdk/utilities/dynamodb_utils.py +151 -0
- geek_cafe_saas_sdk/utilities/environment_loader.py +149 -0
- geek_cafe_saas_sdk/utilities/environment_variables.py +228 -0
- geek_cafe_saas_sdk/utilities/http_body_parameters.py +44 -0
- geek_cafe_saas_sdk/utilities/http_path_parameters.py +60 -0
- geek_cafe_saas_sdk/utilities/http_status_code.py +63 -0
- geek_cafe_saas_sdk/utilities/jwt_utility.py +234 -0
- geek_cafe_saas_sdk/utilities/lambda_event_utility.py +776 -0
- geek_cafe_saas_sdk/utilities/logging_utility.py +64 -0
- geek_cafe_saas_sdk/utilities/message_query_helper.py +340 -0
- geek_cafe_saas_sdk/utilities/response.py +209 -0
- geek_cafe_saas_sdk/utilities/string_functions.py +180 -0
- geek_cafe_saas_sdk-0.6.0.dist-info/METADATA +397 -0
- geek_cafe_saas_sdk-0.6.0.dist-info/RECORD +194 -0
- geek_cafe_saas_sdk-0.6.0.dist-info/WHEEL +4 -0
- geek_cafe_saas_sdk-0.6.0.dist-info/licenses/LICENSE +47 -0
|
@@ -0,0 +1,610 @@
|
|
|
1
|
+
# Website Analytics Tally Service
|
|
2
|
+
|
|
3
|
+
from typing import Dict, Any, Optional, List
|
|
4
|
+
from boto3_assist.dynamodb.dynamodb import DynamoDB
|
|
5
|
+
from geek_cafe_saas_sdk.core.service_result import ServiceResult
|
|
6
|
+
from geek_cafe_saas_sdk.core.error_codes import ErrorCode
|
|
7
|
+
from .website_analytics_service import WebsiteAnalyticsService
|
|
8
|
+
from .website_analytics_summary_service import WebsiteAnalyticsSummaryService
|
|
9
|
+
from geek_cafe_saas_sdk.domains.analytics.models import WebsiteAnalytics, WebsiteAnalyticsSummary
|
|
10
|
+
from aws_lambda_powertools import Logger
|
|
11
|
+
import os
|
|
12
|
+
import datetime as dt
|
|
13
|
+
import time
|
|
14
|
+
|
|
15
|
+
logger = Logger()
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class WebsiteAnalyticsTallyService:
|
|
19
|
+
"""
|
|
20
|
+
Service for aggregating analytics data into summaries.
|
|
21
|
+
|
|
22
|
+
Designed to be called by EventBridge scheduled jobs (e.g., hourly).
|
|
23
|
+
Aggregates raw analytics events into summary records for efficient querying.
|
|
24
|
+
"""
|
|
25
|
+
|
|
26
|
+
def __init__(self, *, dynamodb: Optional[DynamoDB] = None, table_name: Optional[str] = None):
    """
    Initialize the tally service.

    Args:
        dynamodb: Optional pre-configured DynamoDB wrapper; shared by both
            underlying services so reads and writes hit the same connection.
            Each service creates its own when None.
        table_name: Optional DynamoDB table name passed through to both services.
    """
    # Fix: parameters defaulted to None but were annotated as non-optional
    # (implicit Optional is deprecated per PEP 484); annotate explicitly.
    self.analytics_service = WebsiteAnalyticsService(dynamodb=dynamodb, table_name=table_name)
    self.summary_service = WebsiteAnalyticsSummaryService(dynamodb=dynamodb, table_name=table_name)
    self.page_size = 100  # Configurable page size for pagination

    # Pagination monitoring configuration from environment variables.
    # These guard scheduled jobs against runaway pagination loops.
    self.max_pagination_iterations = int(os.getenv('TALLY_MAX_PAGINATION_ITERATIONS', '50'))
    self.max_pagination_time_seconds = int(os.getenv('TALLY_MAX_PAGINATION_TIME_SECONDS', '30'))
    # Opt-in: when 'true', pagination stops (rather than only warns) at the limits above.
    self.halt_on_pagination_limit = os.getenv('TALLY_HALT_ON_PAGINATION_LIMIT', 'false').lower() == 'true'
def aggregate_analytics_for_route(self, route: str, tenant_id: str, user_id: str,
                                  period_start_ts: float, period_end_ts: float,
                                  period_type: str = "hourly") -> ServiceResult[WebsiteAnalyticsSummary]:
    """
    Aggregate all analytics for a specific route within a time period.

    Fetches every analytics event for the route (paginated), filters to the
    requested time window, groups events by analytics_type, and writes one
    summary record per type via the summary service.

    Args:
        route: The route/slug to aggregate analytics for
        tenant_id: Tenant ID for access control
        user_id: User ID for audit trail
        period_start_ts: Start of aggregation period (UTC timestamp)
        period_end_ts: End of aggregation period (UTC timestamp)
        period_type: Type of period (hourly, daily, weekly, monthly)

    Returns:
        ServiceResult containing the created/updated WebsiteAnalyticsSummary
        ("general" summary preferred when several types were summarized).
    """
    try:
        logger.info(f"Starting analytics aggregation for route: {route}, period: {period_start_ts} to {period_end_ts}")

        # Get all analytics for this route with pagination support.
        # NOTE(review): list_by_route is queried without a time filter, so
        # the whole route history is fetched and filtered in memory below —
        # confirm the service cannot scope the query server-side.
        all_analytics = []
        start_key = None
        pagination_iterations = 0
        pagination_start_time = time.time()

        while True:
            pagination_iterations += 1
            pagination_elapsed = time.time() - pagination_start_time

            # Check pagination limits. By default these only emit metric
            # warnings; the loop is actually stopped only when
            # halt_on_pagination_limit is enabled (see __init__).
            if pagination_iterations > self.max_pagination_iterations:
                logger.warning(
                    "Analytics pagination iteration limit exceeded",
                    extra={
                        "metric_name": "AnalyticsPaginationIterationsExceeded",
                        "metric_value": pagination_iterations,
                        "route": route,
                        "analytics_collected": len(all_analytics),
                        "max_iterations": self.max_pagination_iterations
                    }
                )
                if self.halt_on_pagination_limit:
                    logger.error(f"Halting pagination after {pagination_iterations} iterations")
                    break

            if pagination_elapsed > self.max_pagination_time_seconds:
                logger.warning(
                    "Analytics pagination time limit exceeded",
                    extra={
                        "metric_name": "AnalyticsPaginationTimeExceeded",
                        "metric_value": pagination_elapsed,
                        "route": route,
                        "analytics_collected": len(all_analytics),
                        "max_time_seconds": self.max_pagination_time_seconds
                    }
                )
                if self.halt_on_pagination_limit:
                    logger.error(f"Halting pagination after {pagination_elapsed:.2f} seconds")
                    break

            analytics_result = self.analytics_service.list_by_route(route, start_key=start_key)

            # Any page failure aborts the whole aggregation with the
            # downstream error code preserved.
            if not analytics_result.success:
                logger.error(f"Failed to retrieve analytics for route {route}: {analytics_result.message}")
                return ServiceResult.error_result(
                    message=f"Failed to retrieve analytics: {analytics_result.message}",
                    error_code=analytics_result.error_code
                )

            # Add this page of results
            if analytics_result.data:
                all_analytics.extend(analytics_result.data)

            # Check if there are more pages via error_details.
            # NOTE(review): the pagination cursor travels in
            # error_details['last_evaluated_key'] even on success — unusual
            # channel; confirm this matches the ServiceResult contract.
            if (analytics_result.error_details and
                'last_evaluated_key' in analytics_result.error_details):
                start_key = analytics_result.error_details['last_evaluated_key']
                logger.debug(f"Fetching next page of analytics, total so far: {len(all_analytics)}")
            else:
                # No more pages
                break

        # Log pagination metrics
        logger.info(
            "Pagination completed for analytics aggregation",
            extra={
                "metric_name": "AnalyticsPaginationCompleted",
                "iterations": pagination_iterations,
                "elapsed_seconds": pagination_elapsed,
                "analytics_collected": len(all_analytics),
                "route": route
            }
        )

        # Filter analytics by time period (inclusive on both ends).
        analytics_in_period = [
            a for a in all_analytics
            if period_start_ts <= a.created_utc_ts <= period_end_ts
        ]

        if not analytics_in_period:
            # No analytics - create empty summary so the period is still
            # marked as tallied.
            return self._create_empty_summary(route, tenant_id, user_id,
                                              period_start_ts, period_end_ts, period_type)

        # Group by analytics type (general / error / performance / custom).
        analytics_by_type = {}
        for analytics in analytics_in_period:
            analytics_type = analytics.analytics_type
            if analytics_type not in analytics_by_type:
                analytics_by_type[analytics_type] = []
            analytics_by_type[analytics_type].append(analytics)

        # Create summaries for each type. Individual summary failures are
        # silently skipped here (only successes are collected).
        summaries = []
        for analytics_type, analytics_list in analytics_by_type.items():
            summary_data = self._aggregate_by_type(analytics_type, analytics_list)

            summary_result = self._create_or_update_summary(
                route, tenant_id, user_id, analytics_type,
                period_start_ts, period_end_ts, period_type,
                summary_data
            )

            if summary_result.success:
                summaries.append(summary_result.data)
                logger.info(f"Summary created for route {route}, type {analytics_type}: {len(analytics_list)} events")

        # Return the first summary (or general if available)
        if summaries:
            general_summaries = [s for s in summaries if s.analytics_type == "general"]
            return ServiceResult.success_result(general_summaries[0] if general_summaries else summaries[0])

        # All per-type writes failed; fall back to an empty summary.
        return self._create_empty_summary(route, tenant_id, user_id,
                                          period_start_ts, period_end_ts, period_type)

    except Exception as e:
        logger.error(f"Error aggregating analytics for route {route}: {str(e)}")
        return ServiceResult.exception_result(
            e,
            error_code=ErrorCode.OPERATION_FAILED,
            context=f"Failed to aggregate analytics for route {route}"
        )
def _aggregate_by_type(self, analytics_type: str, analytics_list: List[WebsiteAnalytics]) -> Dict[str, Any]:
|
|
182
|
+
"""Aggregate analytics data based on type."""
|
|
183
|
+
if analytics_type == "general":
|
|
184
|
+
return self._aggregate_general_analytics(analytics_list)
|
|
185
|
+
elif analytics_type == "error":
|
|
186
|
+
return self._aggregate_error_analytics(analytics_list)
|
|
187
|
+
elif analytics_type == "performance":
|
|
188
|
+
return self._aggregate_performance_analytics(analytics_list)
|
|
189
|
+
elif analytics_type == "custom":
|
|
190
|
+
return self._aggregate_custom_analytics(analytics_list)
|
|
191
|
+
else:
|
|
192
|
+
return self._aggregate_general_analytics(analytics_list)
|
|
193
|
+
|
|
194
|
+
def _aggregate_general_analytics(self, analytics_list: List[WebsiteAnalytics]) -> Dict[str, Any]:
|
|
195
|
+
"""Aggregate general analytics (page views, sessions)."""
|
|
196
|
+
total_events = len(analytics_list)
|
|
197
|
+
|
|
198
|
+
# Count unique sessions and users
|
|
199
|
+
unique_sessions: Set[str] = set()
|
|
200
|
+
unique_users: Set[str] = set()
|
|
201
|
+
|
|
202
|
+
# Collect metrics
|
|
203
|
+
durations = []
|
|
204
|
+
scroll_depths = []
|
|
205
|
+
|
|
206
|
+
for analytics in analytics_list:
|
|
207
|
+
if analytics.session_id:
|
|
208
|
+
unique_sessions.add(analytics.session_id)
|
|
209
|
+
if analytics.user_id:
|
|
210
|
+
unique_users.add(analytics.user_id)
|
|
211
|
+
|
|
212
|
+
# Extract metrics from data dict
|
|
213
|
+
if analytics.data:
|
|
214
|
+
duration = analytics.data.get('duration_ms')
|
|
215
|
+
if duration is not None:
|
|
216
|
+
durations.append(duration)
|
|
217
|
+
|
|
218
|
+
scroll = analytics.data.get('scroll_depth')
|
|
219
|
+
if scroll is not None:
|
|
220
|
+
scroll_depths.append(scroll)
|
|
221
|
+
|
|
222
|
+
metrics = {
|
|
223
|
+
"page_views": total_events,
|
|
224
|
+
}
|
|
225
|
+
|
|
226
|
+
# Calculate averages
|
|
227
|
+
if durations:
|
|
228
|
+
metrics["avg_duration_ms"] = sum(durations) / len(durations)
|
|
229
|
+
metrics["max_duration_ms"] = max(durations)
|
|
230
|
+
metrics["min_duration_ms"] = min(durations)
|
|
231
|
+
|
|
232
|
+
if scroll_depths:
|
|
233
|
+
metrics["avg_scroll_depth"] = sum(scroll_depths) / len(scroll_depths)
|
|
234
|
+
|
|
235
|
+
return {
|
|
236
|
+
"total_events": total_events,
|
|
237
|
+
"unique_sessions": len(unique_sessions),
|
|
238
|
+
"unique_users": len(unique_users),
|
|
239
|
+
"metrics": metrics
|
|
240
|
+
}
|
|
241
|
+
|
|
242
|
+
def _aggregate_error_analytics(self, analytics_list: List[WebsiteAnalytics]) -> Dict[str, Any]:
|
|
243
|
+
"""Aggregate error analytics."""
|
|
244
|
+
total_events = len(analytics_list)
|
|
245
|
+
|
|
246
|
+
# Count unique sessions and users
|
|
247
|
+
unique_sessions: Set[str] = set()
|
|
248
|
+
unique_users: Set[str] = set()
|
|
249
|
+
|
|
250
|
+
# Count errors by type
|
|
251
|
+
error_counts = {}
|
|
252
|
+
error_messages = []
|
|
253
|
+
|
|
254
|
+
for analytics in analytics_list:
|
|
255
|
+
if analytics.session_id:
|
|
256
|
+
unique_sessions.add(analytics.session_id)
|
|
257
|
+
if analytics.user_id:
|
|
258
|
+
unique_users.add(analytics.user_id)
|
|
259
|
+
|
|
260
|
+
if analytics.data:
|
|
261
|
+
error_type = analytics.data.get('error_type', 'unknown')
|
|
262
|
+
error_counts[error_type] = error_counts.get(error_type, 0) + 1
|
|
263
|
+
|
|
264
|
+
error_msg = analytics.data.get('error_message')
|
|
265
|
+
if error_msg:
|
|
266
|
+
error_messages.append(error_msg)
|
|
267
|
+
|
|
268
|
+
metrics = {
|
|
269
|
+
"total_errors": total_events,
|
|
270
|
+
"errors_by_type": error_counts,
|
|
271
|
+
"unique_error_messages": len(set(error_messages))
|
|
272
|
+
}
|
|
273
|
+
|
|
274
|
+
return {
|
|
275
|
+
"total_events": total_events,
|
|
276
|
+
"unique_sessions": len(unique_sessions),
|
|
277
|
+
"unique_users": len(unique_users),
|
|
278
|
+
"metrics": metrics
|
|
279
|
+
}
|
|
280
|
+
|
|
281
|
+
def _aggregate_performance_analytics(self, analytics_list: List[WebsiteAnalytics]) -> Dict[str, Any]:
|
|
282
|
+
"""Aggregate performance analytics."""
|
|
283
|
+
total_events = len(analytics_list)
|
|
284
|
+
|
|
285
|
+
# Count unique sessions and users
|
|
286
|
+
unique_sessions: Set[str] = set()
|
|
287
|
+
unique_users: Set[str] = set()
|
|
288
|
+
|
|
289
|
+
# Collect performance metrics
|
|
290
|
+
load_times = []
|
|
291
|
+
ttfb_times = []
|
|
292
|
+
fcp_times = []
|
|
293
|
+
lcp_times = []
|
|
294
|
+
|
|
295
|
+
for analytics in analytics_list:
|
|
296
|
+
if analytics.session_id:
|
|
297
|
+
unique_sessions.add(analytics.session_id)
|
|
298
|
+
if analytics.user_id:
|
|
299
|
+
unique_users.add(analytics.user_id)
|
|
300
|
+
|
|
301
|
+
if analytics.data:
|
|
302
|
+
load_time = analytics.data.get('load_time_ms')
|
|
303
|
+
if load_time is not None:
|
|
304
|
+
load_times.append(load_time)
|
|
305
|
+
|
|
306
|
+
ttfb = analytics.data.get('ttfb_ms')
|
|
307
|
+
if ttfb is not None:
|
|
308
|
+
ttfb_times.append(ttfb)
|
|
309
|
+
|
|
310
|
+
fcp = analytics.data.get('fcp_ms')
|
|
311
|
+
if fcp is not None:
|
|
312
|
+
fcp_times.append(fcp)
|
|
313
|
+
|
|
314
|
+
lcp = analytics.data.get('lcp_ms')
|
|
315
|
+
if lcp is not None:
|
|
316
|
+
lcp_times.append(lcp)
|
|
317
|
+
|
|
318
|
+
metrics = {}
|
|
319
|
+
|
|
320
|
+
if load_times:
|
|
321
|
+
metrics["avg_load_time_ms"] = sum(load_times) / len(load_times)
|
|
322
|
+
metrics["p95_load_time_ms"] = self._calculate_percentile(load_times, 95)
|
|
323
|
+
|
|
324
|
+
if ttfb_times:
|
|
325
|
+
metrics["avg_ttfb_ms"] = sum(ttfb_times) / len(ttfb_times)
|
|
326
|
+
|
|
327
|
+
if fcp_times:
|
|
328
|
+
metrics["avg_fcp_ms"] = sum(fcp_times) / len(fcp_times)
|
|
329
|
+
|
|
330
|
+
if lcp_times:
|
|
331
|
+
metrics["avg_lcp_ms"] = sum(lcp_times) / len(lcp_times)
|
|
332
|
+
|
|
333
|
+
return {
|
|
334
|
+
"total_events": total_events,
|
|
335
|
+
"unique_sessions": len(unique_sessions),
|
|
336
|
+
"unique_users": len(unique_users),
|
|
337
|
+
"metrics": metrics
|
|
338
|
+
}
|
|
339
|
+
|
|
340
|
+
def _aggregate_custom_analytics(self, analytics_list: List[WebsiteAnalytics]) -> Dict[str, Any]:
|
|
341
|
+
"""Aggregate custom analytics."""
|
|
342
|
+
total_events = len(analytics_list)
|
|
343
|
+
|
|
344
|
+
# Count unique sessions and users
|
|
345
|
+
unique_sessions: Set[str] = set()
|
|
346
|
+
unique_users: Set[str] = set()
|
|
347
|
+
|
|
348
|
+
# Count events by name
|
|
349
|
+
event_counts = {}
|
|
350
|
+
|
|
351
|
+
for analytics in analytics_list:
|
|
352
|
+
if analytics.session_id:
|
|
353
|
+
unique_sessions.add(analytics.session_id)
|
|
354
|
+
if analytics.user_id:
|
|
355
|
+
unique_users.add(analytics.user_id)
|
|
356
|
+
|
|
357
|
+
if analytics.data:
|
|
358
|
+
event_name = analytics.data.get('event', 'unknown')
|
|
359
|
+
event_counts[event_name] = event_counts.get(event_name, 0) + 1
|
|
360
|
+
|
|
361
|
+
metrics = {
|
|
362
|
+
"total_events": total_events,
|
|
363
|
+
"events_by_name": event_counts
|
|
364
|
+
}
|
|
365
|
+
|
|
366
|
+
return {
|
|
367
|
+
"total_events": total_events,
|
|
368
|
+
"unique_sessions": len(unique_sessions),
|
|
369
|
+
"unique_users": len(unique_users),
|
|
370
|
+
"metrics": metrics
|
|
371
|
+
}
|
|
372
|
+
|
|
373
|
+
def _calculate_percentile(self, values: List[float], percentile: int) -> float:
|
|
374
|
+
"""Calculate percentile of a list of values."""
|
|
375
|
+
if not values:
|
|
376
|
+
return 0.0
|
|
377
|
+
sorted_values = sorted(values)
|
|
378
|
+
index = int((percentile / 100) * len(sorted_values))
|
|
379
|
+
return sorted_values[min(index, len(sorted_values) - 1)]
|
|
380
|
+
|
|
381
|
+
def _create_empty_summary(self, route: str, tenant_id: str, user_id: str,
|
|
382
|
+
period_start_ts: float, period_end_ts: float,
|
|
383
|
+
period_type: str) -> ServiceResult[WebsiteAnalyticsSummary]:
|
|
384
|
+
"""Create an empty summary for routes with no analytics."""
|
|
385
|
+
return self.summary_service.create(
|
|
386
|
+
tenant_id=tenant_id,
|
|
387
|
+
user_id=user_id,
|
|
388
|
+
route=route,
|
|
389
|
+
analytics_type="general",
|
|
390
|
+
period_start_ts=period_start_ts,
|
|
391
|
+
period_end_ts=period_end_ts,
|
|
392
|
+
period_type=period_type,
|
|
393
|
+
total_events=0,
|
|
394
|
+
unique_sessions=0,
|
|
395
|
+
unique_users=0,
|
|
396
|
+
metrics={},
|
|
397
|
+
content={
|
|
398
|
+
"last_tallied_utc_ts": self._get_current_timestamp(),
|
|
399
|
+
"event_count": 0
|
|
400
|
+
}
|
|
401
|
+
)
|
|
402
|
+
|
|
403
|
+
def _create_or_update_summary(self, route: str, tenant_id: str, user_id: str,
|
|
404
|
+
analytics_type: str, period_start_ts: float,
|
|
405
|
+
period_end_ts: float, period_type: str,
|
|
406
|
+
summary_data: Dict[str, Any]) -> ServiceResult[WebsiteAnalyticsSummary]:
|
|
407
|
+
"""Create or update analytics summary."""
|
|
408
|
+
return self.summary_service.create(
|
|
409
|
+
tenant_id=tenant_id,
|
|
410
|
+
user_id=user_id,
|
|
411
|
+
route=route,
|
|
412
|
+
analytics_type=analytics_type,
|
|
413
|
+
period_start_ts=period_start_ts,
|
|
414
|
+
period_end_ts=period_end_ts,
|
|
415
|
+
period_type=period_type,
|
|
416
|
+
total_events=summary_data["total_events"],
|
|
417
|
+
unique_sessions=summary_data["unique_sessions"],
|
|
418
|
+
unique_users=summary_data["unique_users"],
|
|
419
|
+
metrics=summary_data["metrics"],
|
|
420
|
+
content={
|
|
421
|
+
"last_tallied_utc_ts": self._get_current_timestamp(),
|
|
422
|
+
"event_count": summary_data["total_events"]
|
|
423
|
+
}
|
|
424
|
+
)
|
|
425
|
+
|
|
426
|
+
def aggregate_multiple_routes(self, routes: List[str], tenant_id: str, user_id: str,
                              period_start_ts: float, period_end_ts: float,
                              period_type: str = "hourly") -> ServiceResult[List[WebsiteAnalyticsSummary]]:
    """
    Aggregate analytics for multiple routes efficiently.

    Useful for batch processing or scheduled jobs. Each route is aggregated
    independently; a per-route failure is recorded rather than aborting the
    remaining routes.

    Args:
        routes: List of routes to process
        tenant_id: Tenant ID for access control
        user_id: User ID for audit trail
        period_start_ts: Start of aggregation period
        period_end_ts: End of aggregation period
        period_type: Type of period (hourly, daily, etc.)

    Returns:
        ServiceResult containing the list of created summaries, or an
        error result with PARTIAL_FAILURE details when any route failed
        (successful summaries are still available in error_details).
    """
    try:
        logger.info(f"Starting batch aggregation for {len(routes)} routes")

        successes: List[WebsiteAnalyticsSummary] = []
        failures: List[Dict[str, Any]] = []

        for current_route in routes:
            route_result = self.aggregate_analytics_for_route(
                current_route, tenant_id, user_id, period_start_ts, period_end_ts, period_type
            )

            if route_result.success:
                successes.append(route_result.data)
                continue

            failures.append({
                'route': current_route,
                'message': route_result.message,
                'error_code': route_result.error_code
            })
            logger.warning(f"Failed to aggregate route {current_route}: {route_result.message}")

        if not failures:
            logger.info(f"Batch aggregation completed successfully for all {len(routes)} routes")
            return ServiceResult.success_result(successes)

        logger.warning(f"Batch aggregation completed with {len(failures)} failures out of {len(routes)} routes")
        return ServiceResult.error_result(
            message=f"Batch aggregation completed with failures: {len(failures)}/{len(routes)} failed",
            error_code=ErrorCode.PARTIAL_FAILURE,
            error_details={
                'successful_count': len(successes),
                'failed_count': len(failures),
                'failed_routes': failures,
                'successful_summaries': successes
            }
        )

    except Exception as e:
        logger.error(f"Error in batch aggregation operation: {str(e)}")
        return ServiceResult.exception_result(
            e,
            error_code=ErrorCode.BATCH_OPERATION_FAILED,
            context="Failed to process batch aggregation operation"
        )
|
|
489
|
+
|
|
490
|
+
def aggregate_hourly(self, tenant_id: str, user_id: str,
                     hours_ago: int = 1) -> ServiceResult[List[WebsiteAnalyticsSummary]]:
    """
    Aggregate analytics for the last N hours.

    This is the main method to be called by EventBridge scheduled jobs.

    Pages through ALL analytics records for the tenant (the listing call is
    not time-bounded), then filters to the requested window client-side and
    delegates per-route aggregation to ``aggregate_multiple_routes``.

    Args:
        tenant_id: Tenant ID for access control
        user_id: User ID for audit trail
        hours_ago: How many hours ago to start aggregation from

    Returns:
        ServiceResult containing list of created summaries
    """
    try:
        current_time = self._get_current_timestamp()
        period_end_ts = current_time
        period_start_ts = current_time - (hours_ago * 3600)  # Convert hours to seconds

        logger.info(f"Starting hourly aggregation for period: {period_start_ts} to {period_end_ts}")

        # Get all analytics in the time period with pagination support
        all_analytics = []
        start_key = None  # pagination cursor for list_by_tenant
        pagination_iterations = 0
        pagination_start_time = time.time()

        while True:
            pagination_iterations += 1
            pagination_elapsed = time.time() - pagination_start_time

            # Check pagination limits. NOTE(review): when
            # halt_on_pagination_limit is False these warnings fire on every
            # subsequent iteration and the loop keeps paging — confirm that
            # "warn but continue" is the intended behavior.
            if pagination_iterations > self.max_pagination_iterations:
                logger.warning(
                    "Hourly analytics pagination iteration limit exceeded",
                    extra={
                        "metric_name": "HourlyAnalyticsPaginationIterationsExceeded",
                        "metric_value": pagination_iterations,
                        "tenant_id": tenant_id,
                        "analytics_collected": len(all_analytics),
                        "max_iterations": self.max_pagination_iterations
                    }
                )
                if self.halt_on_pagination_limit:
                    logger.error(f"Halting pagination after {pagination_iterations} iterations")
                    break

            if pagination_elapsed > self.max_pagination_time_seconds:
                logger.warning(
                    "Hourly analytics pagination time limit exceeded",
                    extra={
                        "metric_name": "HourlyAnalyticsPaginationTimeExceeded",
                        "metric_value": pagination_elapsed,
                        "tenant_id": tenant_id,
                        "analytics_collected": len(all_analytics),
                        "max_time_seconds": self.max_pagination_time_seconds
                    }
                )
                if self.halt_on_pagination_limit:
                    logger.error(f"Halting pagination after {pagination_elapsed:.2f} seconds")
                    break

            # Fetch one page of the tenant's analytics; time filtering
            # happens below, after all pages are collected.
            analytics_result = self.analytics_service.list_by_tenant(tenant_id, start_key=start_key)

            # Propagate the failed page result to the caller as-is.
            if not analytics_result.success:
                return analytics_result

            # Add this page of results
            if analytics_result.data:
                all_analytics.extend(analytics_result.data)

            # Check if there are more pages via error_details
            # (the pagination cursor rides in error_details even on success —
            # presumably a quirk of list_by_tenant's contract; verify there).
            if (analytics_result.error_details and
                'last_evaluated_key' in analytics_result.error_details):
                start_key = analytics_result.error_details['last_evaluated_key']
                logger.debug(f"Fetching next page of tenant analytics, total so far: {len(all_analytics)}")
            else:
                # No more pages
                break

        # Log pagination metrics
        # (pagination_elapsed is always bound: the loop body runs at least once)
        logger.info(
            "Pagination completed for hourly aggregation",
            extra={
                "metric_name": "HourlyAnalyticsPaginationCompleted",
                "iterations": pagination_iterations,
                "elapsed_seconds": pagination_elapsed,
                "analytics_collected": len(all_analytics),
                "tenant_id": tenant_id
            }
        )

        # Filter by time period and group by route
        routes_with_data: Set[str] = set()
        for analytics in all_analytics:
            if period_start_ts <= analytics.created_utc_ts <= period_end_ts:
                if analytics.route:
                    routes_with_data.add(analytics.route)

        if not routes_with_data:
            logger.info("No routes with analytics data in the specified period")
            return ServiceResult.success_result([])

        # Aggregate each route
        return self.aggregate_multiple_routes(
            list(routes_with_data), tenant_id, user_id,
            period_start_ts, period_end_ts, "hourly"
        )

    except Exception as e:
        logger.error(f"Error in hourly aggregation: {str(e)}")
        return ServiceResult.exception_result(
            e,
            error_code=ErrorCode.OPERATION_FAILED,
            context="Failed to process hourly aggregation"
        )
|
|
607
|
+
|
|
608
|
+
def _get_current_timestamp(self) -> float:
|
|
609
|
+
"""Get current UTC timestamp."""
|
|
610
|
+
return dt.datetime.now(dt.UTC).timestamp()
|
|
File without changes
|
|
File without changes
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
# src/geek_cafe_saas_sdk/lambda_handlers/users/create/app.py
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
from typing import Dict, Any
|
|
5
|
+
|
|
6
|
+
from geek_cafe_saas_sdk.services import UserService
|
|
7
|
+
from geek_cafe_saas_sdk.lambda_handlers import ServicePool
|
|
8
|
+
from geek_cafe_saas_sdk.utilities.response import service_result_to_response, error_response
|
|
9
|
+
from geek_cafe_saas_sdk.utilities.lambda_event_utility import LambdaEventUtility
|
|
10
|
+
|
|
11
|
+
# Module-level pool: built once per Lambda container so warm invocations can
# reuse a UserService instance — presumably ServicePool caches instances;
# confirm against the ServicePool implementation.
user_service_pool = ServicePool(UserService)
|
|
12
|
+
|
|
13
|
+
def handler(event: Dict[str, Any], context: object, injected_service=None) -> Dict[str, Any]:
    """
    Lambda handler for creating a new user.

    Extracts the caller's identity and tenant from the event, forwards the
    request body to ``UserService.create``, and converts the service result
    into an API Gateway response (201 on success).

    Args:
        event: API Gateway event
        context: Lambda context
        injected_service: Optional UserService for testing

    Returns:
        API Gateway-style response dict; 400 on malformed JSON, 500 on any
        other failure.
    """
    try:
        # Injected service wins (tests); fall back to the warm pool in production.
        service = injected_service or user_service_pool.get()

        payload = LambdaEventUtility.get_body_from_event(event)
        caller_id = LambdaEventUtility.get_authenticated_user_id(event)
        caller_tenant = LambdaEventUtility.get_authenticated_user_tenant_id(event)

        # Forward every body field to the service alongside the caller identity.
        creation = service.create(tenant_id=caller_tenant, user_id=caller_id, **payload)

        return service_result_to_response(creation, success_status=201)

    except json.JSONDecodeError:
        return error_response("Invalid JSON in request body.", "VALIDATION_ERROR", 400)
    except Exception as e:
        # NOTE(review): str(e) is echoed to the client here — confirm that
        # leaking internal error detail in the 500 body is acceptable.
        return error_response(f"An unexpected error occurred: {str(e)}", "INTERNAL_ERROR", 500)
|