GameSentenceMiner-2.18.15-py3-none-any.whl → GameSentenceMiner-2.18.17-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. GameSentenceMiner/anki.py +8 -53
  2. GameSentenceMiner/owocr/owocr/ocr.py +3 -2
  3. GameSentenceMiner/owocr/owocr/run.py +5 -1
  4. GameSentenceMiner/ui/anki_confirmation.py +16 -2
  5. GameSentenceMiner/util/configuration.py +6 -9
  6. GameSentenceMiner/util/db.py +11 -7
  7. GameSentenceMiner/util/games_table.py +320 -0
  8. GameSentenceMiner/web/anki_api_endpoints.py +506 -0
  9. GameSentenceMiner/web/database_api.py +239 -117
  10. GameSentenceMiner/web/static/css/loading-skeleton.css +41 -0
  11. GameSentenceMiner/web/static/css/search.css +54 -0
  12. GameSentenceMiner/web/static/css/stats.css +76 -0
  13. GameSentenceMiner/web/static/js/anki_stats.js +304 -50
  14. GameSentenceMiner/web/static/js/database.js +44 -7
  15. GameSentenceMiner/web/static/js/heatmap.js +326 -0
  16. GameSentenceMiner/web/static/js/overview.js +20 -224
  17. GameSentenceMiner/web/static/js/search.js +190 -23
  18. GameSentenceMiner/web/static/js/stats.js +371 -1
  19. GameSentenceMiner/web/stats.py +188 -0
  20. GameSentenceMiner/web/templates/anki_stats.html +145 -58
  21. GameSentenceMiner/web/templates/components/date-range.html +19 -0
  22. GameSentenceMiner/web/templates/components/html-head.html +45 -0
  23. GameSentenceMiner/web/templates/components/js-config.html +37 -0
  24. GameSentenceMiner/web/templates/components/popups.html +15 -0
  25. GameSentenceMiner/web/templates/components/settings-modal.html +233 -0
  26. GameSentenceMiner/web/templates/database.html +13 -3
  27. GameSentenceMiner/web/templates/goals.html +9 -31
  28. GameSentenceMiner/web/templates/overview.html +16 -223
  29. GameSentenceMiner/web/templates/search.html +46 -0
  30. GameSentenceMiner/web/templates/stats.html +49 -311
  31. GameSentenceMiner/web/texthooking_page.py +4 -66
  32. {gamesentenceminer-2.18.15.dist-info → gamesentenceminer-2.18.17.dist-info}/METADATA +1 -1
  33. {gamesentenceminer-2.18.15.dist-info → gamesentenceminer-2.18.17.dist-info}/RECORD +37 -28
  34. {gamesentenceminer-2.18.15.dist-info → gamesentenceminer-2.18.17.dist-info}/WHEEL +0 -0
  35. {gamesentenceminer-2.18.15.dist-info → gamesentenceminer-2.18.17.dist-info}/entry_points.txt +0 -0
  36. {gamesentenceminer-2.18.15.dist-info → gamesentenceminer-2.18.17.dist-info}/licenses/LICENSE +0 -0
  37. {gamesentenceminer-2.18.15.dist-info → gamesentenceminer-2.18.17.dist-info}/top_level.txt +0 -0
GameSentenceMiner/web/database_api.py

@@ -17,10 +17,11 @@ from GameSentenceMiner.util.db import GameLinesTable
 from GameSentenceMiner.util.configuration import get_stats_config, logger, get_config, save_current_config, save_stats_config
 from GameSentenceMiner.util.text_log import GameLine
 from GameSentenceMiner.web.stats import (
-    calculate_kanji_frequency, calculate_heatmap_data, calculate_total_chars_per_game,
-    calculate_reading_time_per_game, calculate_reading_speed_per_game,
+    calculate_kanji_frequency, calculate_heatmap_data, calculate_mining_heatmap_data,
+    calculate_total_chars_per_game, calculate_reading_time_per_game, calculate_reading_speed_per_game,
     calculate_current_game_stats, calculate_all_games_stats, calculate_daily_reading_time,
-    calculate_time_based_streak, calculate_actual_reading_time
+    calculate_time_based_streak, calculate_actual_reading_time, calculate_hourly_activity,
+    calculate_hourly_reading_speed, calculate_peak_daily_stats, calculate_peak_session_stats
 )
 
 
@@ -47,7 +48,7 @@ def register_database_api_routes(app):
 
         if page < 1:
             page = 1
-        if page_size < 1 or page_size > 100:
+        if page_size < 1 or page_size > 200:
            page_size = 20
 
        if use_regex:
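
The validation is a fallback, not a clamp. A hypothetical standalone sketch of the same rule (the helper name is ours, not the package's) makes the behavior explicit:

    def validate_page_size(page_size: int, default: int = 20, maximum: int = 200) -> int:
        # Mirrors the handler above: anything outside [1, maximum] falls back to
        # the default, so page_size=500 yields 20, not 200.
        return page_size if 1 <= page_size <= maximum else default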
@@ -218,6 +219,54 @@ def register_database_api_routes(app):
             logger.error(f"Error fetching games list: {e}", exc_info=True)
             return jsonify({'error': 'Failed to fetch games list'}), 500
 
+    @app.route('/api/delete-sentence-lines', methods=['POST'])
+    def api_delete_sentence_lines():
+        """
+        Delete specific sentence lines by their IDs.
+        """
+        try:
+            data = request.get_json()
+            line_ids = data.get('line_ids', [])
+
+            if not line_ids:
+                return jsonify({'error': 'No line IDs provided'}), 400
+
+            if not isinstance(line_ids, list):
+                return jsonify({'error': 'line_ids must be a list'}), 400
+
+            # Delete the lines
+            deleted_count = 0
+            failed_ids = []
+
+            for line_id in line_ids:
+                try:
+                    GameLinesTable._db.execute(
+                        f"DELETE FROM {GameLinesTable._table} WHERE id=?",
+                        (line_id,),
+                        commit=True
+                    )
+                    deleted_count += 1
+                except Exception as e:
+                    logger.warning(f"Failed to delete line {line_id}: {e}")
+                    failed_ids.append(line_id)
+
+            logger.info(f"Deleted {deleted_count} sentence lines out of {len(line_ids)} requested")
+
+            response_data = {
+                'deleted_count': deleted_count,
+                'message': f'Successfully deleted {deleted_count} {"sentence" if deleted_count == 1 else "sentences"}'
+            }
+
+            if failed_ids:
+                response_data['warning'] = f'{len(failed_ids)} lines failed to delete'
+                response_data['failed_ids'] = failed_ids
+
+            return jsonify(response_data), 200
+
+        except Exception as e:
+            logger.error(f"Error in sentence line deletion: {e}")
+            return jsonify({'error': f'Failed to delete sentences: {str(e)}'}), 500
+
     @app.route('/api/delete-games', methods=['POST'])
     def api_delete_games():
         """
@@ -641,6 +690,7 @@ def register_database_api_routes(app):
     def api_preview_deduplication():
         """
         Preview duplicate sentences that would be removed based on time window and game selection.
+        Supports ignore_time_window parameter to find all duplicates regardless of time.
         """
         try:
             data = request.get_json()
@@ -650,6 +700,7 @@ def register_database_api_routes(app):
             games = data.get('games', [])
             time_window_minutes = data.get('time_window_minutes', 5)
             case_sensitive = data.get('case_sensitive', False)
+            ignore_time_window = data.get('ignore_time_window', False)
 
             if not games:
                 return jsonify({'error': 'At least one game must be selected'}), 400
@@ -680,24 +731,19 @@ def register_database_api_routes(app):
             duplicate_samples = {}
             time_window_seconds = time_window_minutes * 60
 
-            # Find duplicates within time window for each game
+            # Find duplicates for each game
             for game_name, lines in game_lines.items():
-                text_timeline = []
-
-                for line in lines:
-                    if not line.line_text or not line.line_text.strip():
-                        continue
-
-                    line_text = line.line_text if case_sensitive else line.line_text.lower()
-                    timestamp = float(line.timestamp)
-
-                    # Check for duplicates within time window
-                    for prev_text, prev_timestamp, prev_line_id in reversed(text_timeline):
-                        if timestamp - prev_timestamp > time_window_seconds:
-                            break  # Outside time window
+                if ignore_time_window:
+                    # Find all duplicates regardless of time
+                    seen_texts = {}
+                    for line in lines:
+                        if not line.line_text or not line.line_text.strip():
+                            continue
 
-                        if prev_text == line_text:
-                            # Found duplicate within time window
+                        line_text = line.line_text if case_sensitive else line.line_text.lower()
+
+                        if line_text in seen_texts:
+                            # Found duplicate
                             duplicates_to_remove.append(line.id)
 
                             # Store sample for preview
@@ -707,9 +753,38 @@ def register_database_api_routes(app):
                                 'occurrences': 1
                             }
                             duplicate_samples[line_text]['occurrences'] += 1
-                            break
+                        else:
+                            seen_texts[line_text] = line.id
+                else:
+                    # Find duplicates within time window (original logic)
+                    text_timeline = []
 
-                    text_timeline.append((line_text, timestamp, line.id))
+                    for line in lines:
+                        if not line.line_text or not line.line_text.strip():
+                            continue
+
+                        line_text = line.line_text if case_sensitive else line.line_text.lower()
+                        timestamp = float(line.timestamp)
+
+                        # Check for duplicates within time window
+                        for prev_text, prev_timestamp, prev_line_id in reversed(text_timeline):
+                            if timestamp - prev_timestamp > time_window_seconds:
+                                break  # Outside time window
+
+                            if prev_text == line_text:
+                                # Found duplicate within time window
+                                duplicates_to_remove.append(line.id)
+
+                                # Store sample for preview
+                                if line_text not in duplicate_samples:
+                                    duplicate_samples[line_text] = {
+                                        'text': line.line_text,  # Original case
+                                        'occurrences': 1
+                                    }
+                                duplicate_samples[line_text]['occurrences'] += 1
+                                break
+
+                        text_timeline.append((line_text, timestamp, line.id))
 
             # Calculate statistics
             duplicates_count = len(duplicates_to_remove)
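
Stripped of the Flask plumbing, the two preview modes reduce to the sketch below (a standalone illustration, not code from the package; the Line dataclass stands in for GameLinesTable rows):

    from dataclasses import dataclass

    @dataclass
    class Line:
        id: str
        line_text: str
        timestamp: float

    def preview_duplicates(lines, time_window_seconds=300, case_sensitive=False,
                           ignore_time_window=False):
        """Return IDs the preview would flag; the first occurrence always survives."""
        to_remove = []
        if ignore_time_window:
            seen = {}  # normalized text -> id of first occurrence
            for line in lines:
                if not line.line_text or not line.line_text.strip():
                    continue
                text = line.line_text if case_sensitive else line.line_text.lower()
                if text in seen:
                    to_remove.append(line.id)
                else:
                    seen[text] = line.id
        else:
            timeline = []  # (text, timestamp, id), oldest to newest
            for line in lines:
                if not line.line_text or not line.line_text.strip():
                    continue
                text = line.line_text if case_sensitive else line.line_text.lower()
                ts = float(line.timestamp)
                for prev_text, prev_ts, _prev_id in reversed(timeline):
                    if ts - prev_ts > time_window_seconds:
                        break  # everything earlier is outside the window too
                    if prev_text == text:
                        to_remove.append(line.id)
                        break
                timeline.append((text, ts, line.id))
        return to_remove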
@@ -734,6 +809,7 @@ def register_database_api_routes(app):
     def api_deduplicate():
         """
         Remove duplicate sentences from database based on time window and game selection.
+        Supports ignore_time_window parameter to remove all duplicates regardless of time.
         """
         try:
             data = request.get_json()
@@ -744,6 +820,7 @@ def register_database_api_routes(app):
             time_window_minutes = data.get('time_window_minutes', 5)
             case_sensitive = data.get('case_sensitive', False)
             preserve_newest = data.get('preserve_newest', False)
+            ignore_time_window = data.get('ignore_time_window', False)
 
             if not games:
                 return jsonify({'error': 'At least one game must be selected'}), 400
@@ -773,40 +850,62 @@ def register_database_api_routes(app):
             duplicates_to_remove = []
             time_window_seconds = time_window_minutes * 60
 
-            # Find duplicates within time window for each game
+            # Find duplicates for each game
             for game_name, lines in game_lines.items():
-                text_timeline = []
-
-                for line in lines:
-                    if not line.line_text or not line.line_text.strip():
-                        continue
-
-                    line_text = line.line_text if case_sensitive else line.line_text.lower()
-                    timestamp = float(line.timestamp)
-
-                    # Check for duplicates within time window
-                    duplicate_found = False
-                    for i, (prev_text, prev_timestamp, prev_line_id) in enumerate(reversed(text_timeline)):
-                        if timestamp - prev_timestamp > time_window_seconds:
-                            break  # Outside time window
+                if ignore_time_window:
+                    # Find all duplicates regardless of time
+                    seen_texts = {}
+                    for line in lines:
+                        if not line.line_text or not line.line_text.strip():
+                            continue
 
-                        if prev_text == line_text:
-                            # Found duplicate within time window
+                        line_text = line.line_text if case_sensitive else line.line_text.lower()
+
+                        if line_text in seen_texts:
+                            # Found duplicate
                             if preserve_newest:
                                 # Remove the older one (previous)
-                                duplicates_to_remove.append(prev_line_id)
-                                # Update timeline to replace old entry with new one
-                                timeline_index = len(text_timeline) - 1 - i
-                                text_timeline[timeline_index] = (line_text, timestamp, line.id)
+                                duplicates_to_remove.append(seen_texts[line_text])
+                                seen_texts[line_text] = line.id  # Update to keep newest
                             else:
                                 # Remove the newer one (current)
                                 duplicates_to_remove.append(line.id)
-
-                            duplicate_found = True
-                            break
+                        else:
+                            seen_texts[line_text] = line.id
+                else:
+                    # Find duplicates within time window (original logic)
+                    text_timeline = []
 
-                if not duplicate_found:
-                    text_timeline.append((line_text, timestamp, line.id))
+                    for line in lines:
+                        if not line.line_text or not line.line_text.strip():
+                            continue
+
+                        line_text = line.line_text if case_sensitive else line.line_text.lower()
+                        timestamp = float(line.timestamp)
+
+                        # Check for duplicates within time window
+                        duplicate_found = False
+                        for i, (prev_text, prev_timestamp, prev_line_id) in enumerate(reversed(text_timeline)):
+                            if timestamp - prev_timestamp > time_window_seconds:
+                                break  # Outside time window
+
+                            if prev_text == line_text:
+                                # Found duplicate within time window
+                                if preserve_newest:
+                                    # Remove the older one (previous)
+                                    duplicates_to_remove.append(prev_line_id)
+                                    # Update timeline to replace old entry with new one
+                                    timeline_index = len(text_timeline) - 1 - i
+                                    text_timeline[timeline_index] = (line_text, timestamp, line.id)
+                                else:
+                                    # Remove the newer one (current)
+                                    duplicates_to_remove.append(line.id)
+
+                                duplicate_found = True
+                                break
+
+                        if not duplicate_found:
+                            text_timeline.append((line_text, timestamp, line.id))
 
             # Delete the duplicate lines
             deleted_count = 0
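
The preserve_newest flag decides which copy survives. In whole-game mode the bookkeeping is just a dict from normalized text to the current survivor's ID, as in this reduced sketch (an illustration reusing the Line stand-in from the earlier sketch, not package code):

    def dedupe_whole_game(lines, preserve_newest=False, case_sensitive=False):
        # lines are assumed sorted oldest -> newest, matching the handler's input
        seen = {}        # normalized text -> id of the current survivor
        to_remove = []
        for line in lines:
            if not line.line_text or not line.line_text.strip():
                continue
            text = line.line_text if case_sensitive else line.line_text.lower()
            if text in seen:
                if preserve_newest:
                    to_remove.append(seen[text])  # evict the older survivor
                    seen[text] = line.id
                else:
                    to_remove.append(line.id)     # drop the newer duplicate
            else:
                seen[text] = line.id
        return to_remove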
@@ -821,7 +920,8 @@ def register_database_api_routes(app):
                 except Exception as e:
                     logger.warning(f"Failed to delete duplicate line {line_id}: {e}")
 
-            logger.info(f"Deduplication completed: removed {deleted_count} duplicate sentences from {len(games)} games with {time_window_minutes}min window")
+            mode_desc = "entire game" if ignore_time_window else f"{time_window_minutes}min window"
+            logger.info(f"Deduplication completed: removed {deleted_count} duplicate sentences from {len(games)} games with {mode_desc}")
 
             return jsonify({
                 'deleted_count': deleted_count,
@@ -832,6 +932,27 @@ def register_database_api_routes(app):
             logger.error(f"Error in deduplication: {e}")
             return jsonify({'error': f'Deduplication failed: {str(e)}'}), 500
 
+    @app.route('/api/deduplicate-entire-game', methods=['POST'])
+    def api_deduplicate_entire_game():
+        """
+        Remove duplicate sentences from database across entire games without time window restrictions.
+        This is a convenience endpoint that calls the main deduplicate function with ignore_time_window=True.
+        """
+        try:
+            data = request.get_json()
+            if not data:
+                return jsonify({'error': 'No data provided'}), 400
+
+            # Add ignore_time_window=True to the request data
+            data['ignore_time_window'] = True
+
+            # Call the main deduplication function
+            return api_deduplicate()
+
+        except Exception as e:
+            logger.error(f"Error in entire game deduplication: {e}")
+            return jsonify({'error': f'Entire game deduplication failed: {str(e)}'}), 500
+
     @app.route('/api/merge_games', methods=['POST'])
     def api_merge_games():
         """
@@ -1011,73 +1132,8 @@ def register_database_api_routes(app):
             for i, game in enumerate(game_names):
                 color = colors[i % len(colors)]
 
-            # 1. Fetch all lines and sort them chronologically
-            try:
-                all_lines = sorted(GameLinesTable.all(), key=lambda line: line.timestamp)
-            except Exception as e:
-                logger.error(f"Error fetching lines from database: {e}")
-                return jsonify({'error': 'Failed to fetch data from database'}), 500
-
-            if not all_lines:
-                return jsonify({"labels": [], "datasets": []})
-
-            # 2. Process data into daily totals for each game
-            # Structure: daily_data[date_str][game_name] = {'lines': N, 'chars': N}
-            daily_data = defaultdict(lambda: defaultdict(lambda: {'lines': 0, 'chars': 0}))
-
-            try:
-                # start_time = time.perf_counter()
-                # for line in all_lines:
-                #     day_str = datetime.date.fromtimestamp(float(line.timestamp)).strftime('%Y-%m-%d')
-                #     game = line.game_name or "Unknown Game"
-                #     daily_data[day_str][game]['lines'] += 1
-                #     daily_data[day_str][game]['chars'] += len(line.line_text) if line.line_text else 0
-                # end_time = time.perf_counter()
-                # logger.info(f"Without Punctuation removal and daily aggregation took {end_time - start_time:.4f} seconds for {len(all_lines)} lines")
-
-                # start_time = time.perf_counter()
-                wrong_instance_found = False
-                for line in all_lines:
-                    day_str = datetime.date.fromtimestamp(float(line.timestamp)).strftime('%Y-%m-%d')
-                    game = line.game_name or "Unknown Game"
-                    # Remove punctuation and symbols from line text before counting characters
-                    clean_text = punctionation_regex.sub('', str(line.line_text)) if line.line_text else ''
-                    if not isinstance(clean_text, str) and not wrong_instance_found:
-                        logger.info(f"Non-string line_text encountered: {clean_text} (type: {type(clean_text)})")
-                        wrong_instance_found = True
-
-                    line.line_text = clean_text  # Update line text to cleaned version for future use
-                    daily_data[day_str][game]['lines'] += 1
-                    daily_data[day_str][game]['chars'] += len(clean_text)
-                # end_time = time.perf_counter()
-                # logger.info(f"With Punctuation removal and daily aggregation took {end_time - start_time:.4f} seconds for {len(all_lines)} lines")
-            except Exception as e:
-                logger.error(f"Error processing daily data: {e}")
-                return jsonify({'error': 'Failed to process daily data'}), 500
-
-            # 3. Create cumulative datasets for Chart.js
-            try:
-                sorted_days = sorted(daily_data.keys())
-                game_names = GameLinesTable.get_all_games_with_lines()
-
-                # Keep track of the running total for each metric for each game
-                cumulative_totals = defaultdict(lambda: {'lines': 0, 'chars': 0})
-
-                # Structure for final data: final_data[game_name][metric] = [day1_val, day2_val, ...]
-                final_data = defaultdict(lambda: defaultdict(list))
-
-                for day in sorted_days:
-                    for game in game_names:
-                        # Add the day's total to the cumulative total
-                        cumulative_totals[game]['lines'] += daily_data[day][game]['lines']
-                        cumulative_totals[game]['chars'] += daily_data[day][game]['chars']
-
-                        # Append the new cumulative total to the list for that day
-                        final_data[game]['lines'].append(cumulative_totals[game]['lines'])
-                        final_data[game]['chars'].append(cumulative_totals[game]['chars'])
-            except Exception as e:
-                logger.error(f"Error creating cumulative datasets: {e}")
-                return jsonify({'error': 'Failed to create datasets'}), 500
+            # Note: We already have filtered data in all_lines from line 965, so we don't need to fetch again
+            # The duplicate data fetching that was here has been removed to fix the date range filtering issue
 
             # 4. Format into Chart.js dataset structure
             try:
@@ -1168,6 +1224,33 @@ def register_database_api_routes(app):
                 logger.error(f"Error preparing all lines data: {e}")
                 all_lines_data = []
 
+            # 8. Calculate hourly activity pattern
+            try:
+                hourly_activity_data = calculate_hourly_activity(all_lines)
+            except Exception as e:
+                logger.error(f"Error calculating hourly activity: {e}")
+                hourly_activity_data = [0] * 24
+
+            # 8.5. Calculate hourly reading speed pattern
+            try:
+                hourly_reading_speed_data = calculate_hourly_reading_speed(all_lines)
+            except Exception as e:
+                logger.error(f"Error calculating hourly reading speed: {e}")
+                hourly_reading_speed_data = [0] * 24
+
+            # 9. Calculate peak statistics
+            try:
+                peak_daily_stats = calculate_peak_daily_stats(all_lines)
+            except Exception as e:
+                logger.error(f"Error calculating peak daily stats: {e}")
+                peak_daily_stats = {'max_daily_chars': 0, 'max_daily_hours': 0.0}
+
+            try:
+                peak_session_stats = calculate_peak_session_stats(all_lines)
+            except Exception as e:
+                logger.error(f"Error calculating peak session stats: {e}")
+                peak_session_stats = {'longest_session_hours': 0.0, 'max_session_chars': 0}
+
             return jsonify({
                 "labels": sorted_days,
                 "datasets": datasets,
@@ -1178,13 +1261,52 @@ def register_database_api_routes(app):
                 "readingSpeedPerGame": reading_speed_per_game_data,
                 "currentGameStats": current_game_stats,
                 "allGamesStats": all_games_stats,
-                "allLinesData": all_lines_data
+                "allLinesData": all_lines_data,
+                "hourlyActivityData": hourly_activity_data,
+                "hourlyReadingSpeedData": hourly_reading_speed_data,
+                "peakDailyStats": peak_daily_stats,
+                "peakSessionStats": peak_session_stats
             })
 
         except Exception as e:
             logger.error(f"Unexpected error in api_stats: {e}", exc_info=True)
             return jsonify({'error': 'Failed to generate statistics'}), 500
 
+    @app.route('/api/mining_heatmap')
+    def api_mining_heatmap():
+        """
+        Provides mining heatmap data showing daily mining activity.
+        Counts lines where screenshot_in_anki OR audio_in_anki is not empty.
+        Accepts optional 'start' and 'end' timestamp parameters for filtering.
+        """
+        try:
+            # Get optional timestamp filter parameters
+            start_timestamp = request.args.get('start', None)
+            end_timestamp = request.args.get('end', None)
+
+            # Convert timestamps to float if provided
+            start_timestamp = float(start_timestamp) if start_timestamp else None
+            end_timestamp = float(end_timestamp) if end_timestamp else None
+
+            # Fetch lines filtered by timestamp
+            all_lines = GameLinesTable.get_lines_filtered_by_timestamp(start=start_timestamp, end=end_timestamp)
+
+            if not all_lines:
+                return jsonify({}), 200
+
+            # Calculate mining heatmap data
+            try:
+                heatmap_data = calculate_mining_heatmap_data(all_lines)
+            except Exception as e:
+                logger.error(f"Error calculating mining heatmap data: {e}")
+                return jsonify({'error': 'Failed to calculate mining heatmap'}), 500
+
+            return jsonify(heatmap_data), 200
+
+        except Exception as e:
+            logger.error(f"Unexpected error in api_mining_heatmap: {e}", exc_info=True)
+            return jsonify({'error': 'Failed to generate mining heatmap'}), 500
+
     @app.route('/api/goals-today', methods=['GET'])
     def api_goals_today():
         """
GameSentenceMiner/web/static/css/loading-skeleton.css (new file)

@@ -0,0 +1,41 @@
+/* Loading skeleton animations for better perceived performance */
+.loading-skeleton {
+    background: linear-gradient(90deg, #f0f0f0 25%, #e0e0e0 50%, #f0f0f0 75%);
+    background-size: 200% 100%;
+    animation: loading 1.5s infinite;
+    border-radius: 4px;
+    display: inline-block;
+}
+
+@keyframes loading {
+    0% {
+        background-position: 200% 0;
+    }
+    100% {
+        background-position: -200% 0;
+    }
+}
+
+/* Dark mode support */
+@media (prefers-color-scheme: dark) {
+    .loading-skeleton {
+        background: linear-gradient(90deg, #2a2a2a 25%, #3a3a3a 50%, #2a2a2a 75%);
+        background-size: 200% 100%;
+    }
+}
+
+/* Custom skeleton variants */
+.loading-skeleton.wide {
+    width: 80px;
+    height: 20px;
+}
+
+.loading-skeleton.narrow {
+    width: 40px;
+    height: 16px;
+}
+
+.loading-skeleton.tall {
+    width: 60px;
+    height: 28px;
+}
GameSentenceMiner/web/static/css/search.css

@@ -1,5 +1,59 @@
 /* Search Page Specific Styles */
 
+/* Checkbox styling for search results */
+.line-checkbox {
+    margin-top: 4px;
+    cursor: pointer;
+    min-width: 16px;
+    min-height: 16px;
+}
+
+/* Results controls styling */
+.results-controls {
+    display: flex;
+    align-items: center;
+    gap: 8px;
+    margin-left: auto;
+}
+
+.results-controls .filter-group {
+    margin: 0;
+}
+
+.results-controls .filter-label {
+    font-size: 0.9rem;
+    margin-right: 6px;
+}
+
+.results-controls .filter-select {
+    min-width: 80px;
+}
+
+/* Search-specific responsive styles */
+@media (max-width: 768px) {
+    .navigation {
+        padding: 10px;
+    }
+
+    .nav-link {
+        display: block;
+        margin: 5px 0;
+        text-align: center;
+    }
+
+    .results-controls {
+        flex-direction: column;
+        align-items: flex-start;
+        gap: 8px;
+        margin-left: 0;
+        margin-top: 8px;
+    }
+
+    .results-controls .filter-group {
+        margin-right: 0;
+    }
+}
+
 /* Search-specific responsive styles */
 @media (max-width: 768px) {
     .navigation {
GameSentenceMiner/web/static/css/stats.css

@@ -829,4 +829,80 @@
         padding: 2px 6px;
         margin-left: 3px;
     }
+}
+
+/* ================================
+   Game Stats Table Styling
+   ================================ */
+.stats-table {
+    width: 100%;
+    border-collapse: collapse;
+    margin-top: 15px;
+}
+
+.stats-table thead {
+    background: var(--bg-secondary);
+    border-bottom: 2px solid var(--border-color);
+}
+
+.stats-table th {
+    padding: 12px 16px;
+    text-align: left;
+    font-weight: 600;
+    color: var(--text-primary);
+    font-size: 14px;
+    text-transform: uppercase;
+    letter-spacing: 0.5px;
+}
+
+.stats-table tbody tr {
+    border-bottom: 1px solid var(--border-color);
+    transition: background-color 0.2s ease;
+}
+
+.stats-table tbody tr:hover {
+    background: var(--bg-secondary);
+}
+
+.stats-table td {
+    padding: 12px 16px;
+    color: var(--text-secondary);
+    font-size: 14px;
+}
+
+.stats-table td:first-child {
+    font-weight: 500;
+    color: var(--text-primary);
+}
+
+.empty-message {
+    text-align: center;
+    padding: 40px 20px;
+    color: var(--text-tertiary);
+    font-style: italic;
+}
+
+#gameStatsTableContainer {
+    min-height: 100px;
+}
+
+/* Responsive design for game stats table */
+@media (max-width: 768px) {
+    .stats-table th,
+    .stats-table td {
+        padding: 10px 12px;
+        font-size: 13px;
+    }
+}
+
+@media (max-width: 480px) {
+    .stats-table th,
+    .stats-table td {
+        padding: 8px 10px;
+        font-size: 12px;
+    }
+
+    .stats-table th {
+        font-size: 11px;
+    }
 }