GameSentenceMiner 2.16.7__py3-none-any.whl → 2.16.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- GameSentenceMiner/config_gui.py +54 -4
- GameSentenceMiner/gsm.py +89 -40
- GameSentenceMiner/locales/en_us.json +16 -0
- GameSentenceMiner/locales/ja_jp.json +16 -0
- GameSentenceMiner/locales/zh_cn.json +16 -0
- GameSentenceMiner/util/configuration.py +5 -0
- GameSentenceMiner/util/db.py +83 -2
- GameSentenceMiner/util/ffmpeg.py +1 -0
- GameSentenceMiner/util/get_overlay_coords.py +29 -1
- GameSentenceMiner/vad.py +3 -13
- GameSentenceMiner/web/database_api.py +54 -29
- GameSentenceMiner/web/static/js/search.js +1 -1
- GameSentenceMiner/web/static/js/stats.js +35 -11
- GameSentenceMiner/web/templates/stats.html +467 -425
- {gamesentenceminer-2.16.7.dist-info → gamesentenceminer-2.16.9.dist-info}/METADATA +1 -1
- {gamesentenceminer-2.16.7.dist-info → gamesentenceminer-2.16.9.dist-info}/RECORD +20 -20
- {gamesentenceminer-2.16.7.dist-info → gamesentenceminer-2.16.9.dist-info}/WHEEL +0 -0
- {gamesentenceminer-2.16.7.dist-info → gamesentenceminer-2.16.9.dist-info}/entry_points.txt +0 -0
- {gamesentenceminer-2.16.7.dist-info → gamesentenceminer-2.16.9.dist-info}/licenses/LICENSE +0 -0
- {gamesentenceminer-2.16.7.dist-info → gamesentenceminer-2.16.9.dist-info}/top_level.txt +0 -0
GameSentenceMiner/util/db.py
CHANGED

@@ -1,16 +1,22 @@
 
 
+from datetime import datetime
 import json
 import os
+import random
 import shutil
 import sqlite3
 from sys import platform
 import time
 from typing import Any, Dict, List, Optional, Tuple, Union, Type, TypeVar
 import threading
+import uuid
+
+import pytz
+from datetime import timedelta
 
 from GameSentenceMiner.util.text_log import GameLine
-from GameSentenceMiner.util.configuration import logger, is_dev
+from GameSentenceMiner.util.configuration import get_stats_config, logger, is_dev
 import gzip
 
 
@@ -23,6 +29,14 @@ class SQLiteDB:
     def __init__(self, db_path: str):
         self.db_path = db_path
         self._lock = threading.Lock()
+
+    def backup(self, backup_path: str):
+        """ Create a backup of the database using built in SQLite backup API. """
+        with self._lock, sqlite3.connect(self.db_path, check_same_thread=False) as conn:
+            with sqlite3.connect(backup_path, check_same_thread=False) as backup_conn:
+                conn.backup(backup_conn)
+        if is_dev:
+            logger.debug(f"Database backed up to {backup_path}")
 
     def execute(self, query: str, params: Union[Tuple, Dict] = (), commit: bool = False) -> sqlite3.Cursor:
         with self._lock, sqlite3.connect(self.db_path, check_same_thread=False) as conn:
@@ -341,7 +355,7 @@ class GameLinesTable(SQLiteDBTable):
         self.game_name = game_name
         self.line_text = line_text
         self.context = context
-        self.timestamp = timestamp if timestamp is not None else
+        self.timestamp = timestamp if timestamp is not None else datetime.now().timestamp()
         self.screenshot_in_anki = screenshot_in_anki if screenshot_in_anki is not None else ''
         self.audio_in_anki = audio_in_anki if audio_in_anki is not None else ''
         self.screenshot_path = screenshot_path if screenshot_path is not None else ''
@@ -388,6 +402,20 @@ class GameLinesTable(SQLiteDBTable):
         # logger.info("Adding GameLine to DB: %s", new_line)
         new_line.add()
         return new_line
+
+    @classmethod
+    def add_lines(cls, gamelines: List[GameLine]):
+        new_lines = [cls(id=gl.id, game_name=gl.scene,
+                         line_text=gl.text, timestamp=gl.time.timestamp()) for gl in gamelines]
+        # logger.info("Adding %d GameLines to DB", len(new_lines))
+        params = [(line.id, line.game_name, line.line_text, line.timestamp, line.screenshot_in_anki,
+                   line.audio_in_anki, line.screenshot_path, line.audio_path, line.replay_path, line.translation)
+                  for line in new_lines]
+        cls._db.executemany(
+            f"INSERT INTO {cls._table} (id, game_name, line_text, timestamp, screenshot_in_anki, audio_in_anki, screenshot_path, audio_path, replay_path, translation) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
+            params,
+            commit=True
+        )
 
 
 def get_db_directory():
@@ -441,3 +469,56 @@ for cls in [AIModelsTable, GameLinesTable]:
     # Uncomment to start fresh every time
     # cls.drop()
     # cls.set_db(gsm_db) # --- IGNORE ---
+
+# import random
+# import uuid
+# from datetime import datetime
+# from GameSentenceMiner.util.text_log import GameLine
+# from GameSentenceMiner.util.db import GameLinesTable
+
+# List of common Japanese characters (kanji, hiragana, katakana)
+# japanese_chars = (
+#     "あいうえおかきくけこさしすせそたちつてとなにぬねのはひふへほまみむめもやゆよらりるれろわをん"
+#     "アイウエオカキクケコサシスセソタチツテトナニヌネノハヒフヘホマミムメモヤユヨラリルレロワヲン"
+#     "亜唖娃阿哀愛挨悪握圧扱宛嵐安暗案闇以衣位囲医依委威為胃尉異移維緯"
+#     # ... (add more kanji for more variety)
+# )
+
+# def random_japanese_text(length):
+#     return ''.join(random.choices(japanese_chars, k=length))
+
+# batch_size = 1000
+# lines_batch = []
+
+# def random_datetime(start_year=2024, end_year=2026):
+#     start = datetime(start_year, 1, 1, 0, 0, 0)
+#     end = datetime(end_year, 12, 31, 23, 59, 59)
+#     delta = end - start
+#     random_seconds = random.randint(0, int(delta.total_seconds()))
+#     return start + timedelta(seconds=random_seconds)
+
+# from datetime import timedelta
+
+# for i in range(500000):  # Adjust for desired number of lines
+#     line_text = random_japanese_text(random.randint(25, 40))
+#     lines_batch.append(GameLine(
+#         id=str(uuid.uuid1()),
+#         text=line_text,
+#         time=random_datetime(),
+#         prev=None,
+#         next=None,
+#         index=i,
+#         scene="RandomScene"
+#     ))
+
+#     if len(lines_batch) >= batch_size:
+#         GameLinesTable.add_lines(lines_batch)
+#         lines_batch = []
+#     if i % 1000 == 0:
+#         print(f"Inserted {i} lines...")
+
+# # Insert any remaining lines
+# if lines_batch:
+#     GameLinesTable.add_lines(lines_batch)
+
+print("Done populating GameLinesDB with random Japanese text.")
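
The new GameLinesTable.add_lines() turns a list of GameLine objects into a single executemany() INSERT, and SQLiteDB.backup() wraps sqlite3's built-in Connection.backup(). A minimal usage sketch based on the field names shown in this diff (the _db handle is the table's shared SQLiteDB instance; treat the exact wiring as an assumption):

    import uuid
    from datetime import datetime

    from GameSentenceMiner.util.db import GameLinesTable
    from GameSentenceMiner.util.text_log import GameLine

    # Build a batch of lines, then insert them with one executemany() call
    # instead of one INSERT per GameLine.add().
    lines = [
        GameLine(id=str(uuid.uuid1()), text=f"テスト行 {i}", time=datetime.now(),
                 prev=None, next=None, index=i, scene="ExampleGame")
        for i in range(1000)
    ]
    GameLinesTable.add_lines(lines)

    # Snapshot the whole database with SQLite's online backup API.
    GameLinesTable._db.backup("gsm_backup.db")  # _db is private; used here only for illustration
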
GameSentenceMiner/util/ffmpeg.py
CHANGED

@@ -495,6 +495,7 @@ def get_video_timings(video_path, game_line, anki_card_creation_time=None):
     total_seconds = file_length - time_delta.total_seconds()
     total_seconds_after_offset = total_seconds + get_config().audio.beginning_offset
     if total_seconds < 0 or total_seconds >= file_length:
+        logger.debug(f"get_video_timings: file_mod_time={file_mod_time}, file_length={file_length}, time_delta={time_delta}, total_seconds={total_seconds}, total_seconds_after_offset={total_seconds_after_offset}")
         logger.error("Line mined is outside of the replay buffer! Defaulting to the last 30 seconds of the replay buffer.")
         logger.info("Recommend either increasing replay buffer length in OBS Settings or mining faster.")
         return max(file_length - 30, 0), 0, max(file_length - 30, 0), file_length
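
The added debug line makes the out-of-buffer fallback easier to diagnose. For orientation, a minimal sketch of the arithmetic this hunk revolves around, assuming time_delta is the gap between the replay file's modification time and the mined line's timestamp (the helper name and signature are hypothetical, not part of ffmpeg.py):

    from datetime import datetime

    def seconds_into_replay(file_mod_time: datetime, line_time: datetime, file_length: float) -> float:
        """Offset of the mined line inside the replay file; falls back to the start
        of the last 30 seconds when the line lies outside the recorded window."""
        time_delta = file_mod_time - line_time  # how long before the file was finalized the line appeared
        total_seconds = file_length - time_delta.total_seconds()
        if total_seconds < 0 or total_seconds >= file_length:
            return max(file_length - 30, 0)
        return total_seconds
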
GameSentenceMiner/util/get_overlay_coords.py
CHANGED

@@ -4,6 +4,7 @@ import base64
 import json
 import math
 import os
+import threading
 import time
 from PIL import Image
 from typing import Dict, Any, List, Tuple
@@ -16,6 +17,7 @@ from GameSentenceMiner.util.configuration import OverlayEngine, get_config, is_w
 from GameSentenceMiner.util.electron_config import get_ocr_language
 from GameSentenceMiner.obs import get_screenshot_PIL
 from GameSentenceMiner.web.texthooking_page import send_word_coordinates_to_overlay
+from GameSentenceMiner.web.gsm_websocket import overlay_server_thread
 
 # def align_and_correct(ocr_json, reference_text):
 #     logger.info(f"Starting align_and_correct with reference_text: '{reference_text}'")
@@ -80,6 +82,32 @@ try:
     import mss
 except ImportError:
     mss = None
+
+class OverlayThread(threading.Thread):
+    """
+    A thread to run the overlay processing loop.
+    This is a simple wrapper around asyncio to run the overlay processing
+    in a separate thread.
+    """
+    def __init__(self):
+        super().__init__()
+        self.overlay_processor = OverlayProcessor()
+        self.loop = asyncio.new_event_loop()
+        self.daemon = True  # Ensure thread exits when main program exits
+
+    def run(self):
+        """Runs the overlay processing loop."""
+        asyncio.set_event_loop(self.loop)
+        self.loop.run_until_complete(self.overlay_loop())
+
+    async def overlay_loop(self):
+        """Main loop to periodically process and send overlay data."""
+        while True:
+            if get_config().overlay.periodic and overlay_server_thread.has_clients():
+                await self.overlay_processor.find_box_and_send_to_overlay('')
+                await asyncio.sleep(get_config().overlay.periodic_interval)  # Adjust the interval as needed
+            else:
+                await asyncio.sleep(3)  # Sleep briefly when not active
 
 class OverlayProcessor:
     """
@@ -254,7 +282,7 @@ class OverlayProcessor:
             return_coords=True,
             multiple_crop_coords=True,
             return_one_box=False,
-            furigana_filter_sensitivity=None  # Disable furigana filtering
+            furigana_filter_sensitivity=None,  # Disable furigana filtering
         )
 
         # 3. Create a composite image with only the detected text regions
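
The new OverlayThread runs the periodic overlay OCR loop on a dedicated asyncio event loop inside a daemon thread, and only does work while overlay.periodic is enabled and an overlay websocket client is connected. A minimal sketch of how it would be started (gsm.py also changed in this release, but the exact call site is an assumption):

    from GameSentenceMiner.util.get_overlay_coords import OverlayThread

    # Daemon thread: it will not keep the process alive on shutdown.
    overlay_thread = OverlayThread()
    overlay_thread.start()
    # The loop sleeps 3 s at a time while idle, then sends word coordinates to the
    # overlay every overlay.periodic_interval seconds once a client is connected.
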
GameSentenceMiner/vad.py
CHANGED

@@ -1,6 +1,7 @@
 import tempfile
 import time
 import warnings
+import requests
 from abc import abstractmethod, ABC
 
 from GameSentenceMiner.util import configuration, ffmpeg
@@ -37,19 +38,7 @@ class VADSystem:
             if not result.success and get_config().vad.backup_vad_model != configuration.OFF:
                 logger.info("No voice activity detected, using backup VAD model.")
                 result = self._do_vad_processing(get_config().vad.backup_vad_model, input_audio, output_audio, game_line)
-            if not result.success:
-                if get_config().vad.add_audio_on_no_results:
-                    logger.info("No voice activity detected, using full audio.")
-                    result.output_audio = input_audio
-                else:
-                    logger.info("No voice activity detected.")
-                return result
-            else:
-                logger.info(result.trim_successful_string())
             return result
-        else:
-            return VADResult(True, 0, get_audio_length(input_audio), "OFF", [], input_audio)
-
 
     def _do_vad_processing(self, model, input_audio, output_audio, game_line):
         match model:
@@ -185,7 +174,7 @@ class WhisperVADProcessor(VADProcessor):
         # Transcribe the audio using Whisper
         with warnings.catch_warnings():
             warnings.simplefilter("ignore")
-            result: WhisperResult = self.vad_model.transcribe(temp_wav, vad=True, language=get_config().vad.language,
+            result: WhisperResult = self.vad_model.transcribe(temp_wav, vad=True, language=get_config().vad.language, vad_filter=get_config().vad.use_vad_filter_for_whisper,
                                                               temperature=0.0)
         voice_activity = []
 
@@ -406,6 +395,7 @@ def test_vad_processors():
     get_config().vad.cut_and_splice_segments = False
     get_config().vad.trim_beginning = True
     get_config().vad.add_audio_on_no_results = True
+    get_config().vad.use_vad_filter_for_whisper = False
     for processor, out_name in processors:
         logger.info("Testing Trim Audio with " + processor.vad_system_name)
         out_path = os.path.join(output_dir, out_name.replace("after_splice_", "after_trim_"))
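
The removed branch was the "no speech found" fallback (optionally returning the untrimmed audio) plus the early return used when VAD post-processing is off. For reference, a standalone sketch of the behavior those removed lines implemented, using the same config flag; the helper itself is hypothetical and not part of the package:

    def apply_no_speech_fallback(result, input_audio):
        # Mirrors the removed lines: when neither VAD pass found speech, either fall
        # back to the full audio or leave the result as a failure, depending on config.
        if not result.success:
            if get_config().vad.add_audio_on_no_results:
                logger.info("No voice activity detected, using full audio.")
                result.output_audio = input_audio
            else:
                logger.info("No voice activity detected.")
        else:
            logger.info(result.trim_successful_string())
        return result
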
GameSentenceMiner/web/database_api.py
CHANGED

@@ -12,6 +12,7 @@ import regex
 
 from GameSentenceMiner.util.db import GameLinesTable
 from GameSentenceMiner.util.configuration import get_stats_config, logger, get_config, save_current_config, save_stats_config
+from GameSentenceMiner.util.text_log import GameLine
 from GameSentenceMiner.web.stats import (
     calculate_kanji_frequency, calculate_heatmap_data, calculate_total_chars_per_game,
     calculate_reading_time_per_game, calculate_reading_speed_per_game,
@@ -304,7 +305,7 @@ def register_database_api_routes(app):
             'streak_requirement_hours': config.streak_requirement_hours,
             'reading_hours_target': config.reading_hours_target,
             'character_count_target': config.character_count_target,
-            'games_target': config.games_target
+            'games_target': config.games_target,
         }), 200
     except Exception as e:
         logger.error(f"Error getting settings: {e}")
@@ -806,6 +807,8 @@ def register_database_api_routes(app):
     daily_data = defaultdict(lambda: defaultdict(lambda: {'lines': 0, 'chars': 0}))
 
 
+
+
     # start_time = time.perf_counter()
     # for line in all_lines:
     #     day_str = datetime.date.fromtimestamp(float(line.timestamp)).strftime('%Y-%m-%d')
@@ -975,11 +978,15 @@ def register_database_api_routes(app):
            csv_reader = csv.DictReader(file_io, quoting=csv.QUOTE_MINIMAL, skipinitialspace=True)
 
            # Process CSV rows
-           imported_lines = []
            games_set = set()
            errors = []
-
-
+
+           all_lines = GameLinesTable.all()
+           existing_uuids = {line.id for line in all_lines}
+           batch_size = 1000  # For logging progress
+           batch_insert = []
+           imported_count = 0
+
            def get_line_hash(uuid: str, line_text: str) -> str:
                return uuid + '|' + line_text.strip()
 
@@ -1006,10 +1013,11 @@ def register_database_api_routes(app):
                        continue
 
                    line_hash = get_line_hash(game_uuid, line)
-
-
+
+                   # Check if this line already exists in database
+                   if line_hash in existing_uuids:
+                       logger.info(f"Skipping duplicate UUID already in database: {line_hash}")
                        continue
-                   seen_uuids.add(line_hash)
 
                    # Convert time to timestamp
                    try:
@@ -1021,37 +1029,52 @@ def register_database_api_routes(app):
                    # Clean up line text (remove extra whitespace and newlines)
                    line_text = line.strip()
 
-                   # Check if this line already exists in database
-                   existing_line = GameLinesTable.get(line_hash)
-                   if existing_line:
-                       logger.info(f"Skipping duplicate UUID already in database: {line_hash}")
-                       continue
-
                    # Create GameLinesTable entry
-
+                   # Convert timestamp float to datetime object
+                   dt = datetime.datetime.fromtimestamp(timestamp)
+                   batch_insert.append(GameLine(
                        id=line_hash,
-
-
-
-
+                       text=line_text,
+                       scene=game_name,
+                       time=dt,
+                       prev=None,
+                       next=None,
+                       index=0,
+                   ))
+
+                   logger.info(f"Batch insert size: {len(batch_insert)}")
 
-
+                   existing_uuids.add(line_hash)  # Add to existing to prevent duplicates in same import
+
+                   if len(batch_insert) >= batch_size:
+                       logger.info(f"Importing batch of {len(batch_insert)} lines...")
+                       GameLinesTable.add_lines(batch_insert)
+                       imported_count += len(batch_insert)
+                       batch_insert = []
                    games_set.add(game_name)
 
                except Exception as e:
+                   logger.error(f"Error processing row {row_num}: {e}")
                    errors.append(f"Row {row_num}: Error processing row - {str(e)}")
                    continue
+
+           # Insert the rest of the batch
+           if batch_insert:
+               logger.info(f"Importing final batch of {len(batch_insert)} lines...")
+               GameLinesTable.add_lines(batch_insert)
+               imported_count += len(batch_insert)
+               batch_insert = []
 
-           # Import lines into database
-           imported_count = 0
-           for game_line in imported_lines:
-
-
-
-
-
-
-
+           # # Import lines into database
+           # imported_count = 0
+           # for game_line in imported_lines:
+           #     try:
+           #         game_line.add()
+           #         imported_count += 1
+           #     except Exception as e:
+           #         logger.error(f"Failed to import line {game_line.id}: {e}")
+           #         errors.append(f"Failed to import line {game_line.id}: {str(e)}")
+
           # Prepare response
           response_data = {
               'message': f'Successfully imported {imported_count} lines from {len(games_set)} games',
@@ -1066,6 +1089,8 @@ def register_database_api_routes(app):
 
        logger.info(f"ExStatic import completed: {imported_count} lines from {len(games_set)} games")
 
+        logger.info(f"Import response: {response_data}")
+
        return jsonify(response_data), 200
 
    except csv.Error as e:
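
The ExStatic CSV import now deduplicates against IDs already in the database and flushes rows through GameLinesTable.add_lines() in batches of 1000 instead of calling add() once per row. The core of that pattern, reduced to a sketch (rows and make_game_line are hypothetical stand-ins for the CSV parsing shown in the diff):

    existing_ids = {line.id for line in GameLinesTable.all()}
    batch, batch_size, imported_count = [], 1000, 0

    for row in rows:  # parsed CSV rows
        line_id = row["uuid"] + "|" + row["line"].strip()
        if line_id in existing_ids:
            continue  # already imported on a previous run
        existing_ids.add(line_id)  # also blocks duplicates within this file
        batch.append(make_game_line(row, line_id))
        if len(batch) >= batch_size:
            GameLinesTable.add_lines(batch)
            imported_count += len(batch)
            batch = []

    if batch:  # flush the final partial batch
        GameLinesTable.add_lines(batch)
        imported_count += len(batch)
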
GameSentenceMiner/web/static/js/search.js
CHANGED

@@ -186,7 +186,7 @@ class SentenceSearchApp {
 
         // Format timestamp to ISO format
         const date = new Date(result.timestamp * 1000);
-        const formattedDate = date.
+        const formattedDate = `${date.getFullYear()}-${String(date.getMonth() + 1).padStart(2, '0')}-${String(date.getDate()).padStart(2, '0')} ${date.toTimeString().split(' ')[0]}`;
 
         div.innerHTML = `
             <div class="result-sentence">${highlightedText}</div>
GameSentenceMiner/web/static/js/stats.js
CHANGED

@@ -128,7 +128,7 @@ document.addEventListener('DOMContentLoaded', function () {
            for (let day = 0; day < 7; day++) {
                const date = grid[day][week];
                if (date) {
-                    const dateStr = date.
+                    const dateStr = `${date.getFullYear()}-${String(date.getMonth() + 1).padStart(2, '0')}-${String(date.getDate()).padStart(2, '0')}`;
                    const activity = yearData[dateStr] || 0;
                    dates.push({ date: dateStr, activity: activity });
                }
@@ -154,7 +154,8 @@ document.addEventListener('DOMContentLoaded', function () {
    }
 
    // Calculate current streak from today backwards
-    const
+    const date = new Date();
+    const today = `${date.getFullYear()}-${String(date.getMonth() + 1).padStart(2, '0')}-${String(date.getDate()).padStart(2, '0')}`;
 
    // Find today's index or the most recent date before today
    let todayIndex = -1;
@@ -182,7 +183,10 @@ document.addEventListener('DOMContentLoaded', function () {
    // Group timestamps by day for this year
    const dailyTimestamps = {};
    for (const line of allLinesForYear) {
-        const
+        const ts = parseFloat(line.timestamp);
+        if (isNaN(ts)) continue;
+        const dateObj = new Date(ts * 1000);
+        const dateStr = `${dateObj.getFullYear()}-${String(dateObj.getMonth() + 1).padStart(2, '0')}-${String(dateObj.getDate()).padStart(2, '0')}`;
        if (!dailyTimestamps[dateStr]) {
            dailyTimestamps[dateStr] = [];
        }
@@ -326,7 +330,7 @@ document.addEventListener('DOMContentLoaded', function () {
 
                const date = grid[day][week];
                if (date) {
-                    const dateStr = date.
+                    const dateStr = `${date.getFullYear()}-${String(date.getMonth() + 1).padStart(2, '0')}-${String(date.getDate()).padStart(2, '0')}`;
                    const activity = yearData[dateStr] || 0;
 
                    if (activity > 0 && maxActivity > 0) {
@@ -881,11 +885,14 @@ document.addEventListener('DOMContentLoaded', function () {
        // Group by day and calculate reading time using AFK timer logic
        const dailyTimestamps = {};
        for (const line of recentData) {
-            const
+            const ts = parseFloat(line.timestamp);
+            if (isNaN(ts)) continue;
+            const dateObj = new Date(ts * 1000);
+            const dateStr = `${dateObj.getFullYear()}-${String(dateObj.getMonth() + 1).padStart(2, '0')}-${String(dateObj.getDate()).padStart(2, '0')}`;
            if (!dailyTimestamps[dateStr]) {
                dailyTimestamps[dateStr] = [];
            }
-            dailyTimestamps[dateStr].push(
+            dailyTimestamps[dateStr].push(ts);
        }
 
        for (const [dateStr, timestamps] of Object.entries(dailyTimestamps)) {
@@ -906,14 +913,20 @@ document.addEventListener('DOMContentLoaded', function () {
    } else if (metricType === 'characters') {
        // Group by day and sum characters
        for (const line of recentData) {
-            const
+            const ts = parseFloat(line.timestamp);
+            if (isNaN(ts)) continue;
+            const dateObj = new Date(ts * 1000);
+            const dateStr = `${dateObj.getFullYear()}-${String(dateObj.getMonth() + 1).padStart(2, '0')}-${String(dateObj.getDate()).padStart(2, '0')}`;
            dailyTotals[dateStr] = (dailyTotals[dateStr] || 0) + (line.characters || 0);
        }
    } else if (metricType === 'games') {
        // Group by day and count unique games
        const dailyGames = {};
        for (const line of recentData) {
-            const
+            const ts = parseFloat(line.timestamp);
+            if (isNaN(ts)) continue;
+            const dateObj = new Date(ts * 1000);
+            const dateStr = `${dateObj.getFullYear()}-${String(dateObj.getMonth() + 1).padStart(2, '0')}-${String(dateObj.getDate()).padStart(2, '0')}`;
            if (!dailyGames[dateStr]) {
                dailyGames[dateStr] = new Set();
            }
@@ -1141,14 +1154,19 @@ document.addEventListener('DOMContentLoaded', function () {
    function loadDashboardData(data = null) {
        function updateTodayOverview(allLinesData) {
            // Get today's date string (YYYY-MM-DD)
+            // Get today's date string (YYYY-MM-DD), timezone aware (local time)
            const today = new Date();
-            const
+            const pad = n => n.toString().padStart(2, '0');
+            const todayStr = `${today.getFullYear()}-${pad(today.getMonth() + 1)}-${pad(today.getDate())}`;
            document.getElementById('todayDate').textContent = todayStr;
 
            // Filter lines for today
            const todayLines = (allLinesData || []).filter(line => {
                if (!line.timestamp) return false;
-                const
+                const ts = parseFloat(line.timestamp);
+                if (isNaN(ts)) return false;
+                const dateObj = new Date(ts * 1000);
+                const lineDate = `${dateObj.getFullYear()}-${pad(dateObj.getMonth() + 1)}-${pad(dateObj.getDate())}`;
                return lineDate === todayStr;
            });
 
@@ -1195,7 +1213,13 @@ document.addEventListener('DOMContentLoaded', function () {
            .map(l => parseFloat(l.timestamp))
            .filter(ts => !isNaN(ts))
            .sort((a, b) => a - b);
-
+        // Get AFK timer from settings modal if available
+        let afkTimerSeconds = 120; // default
+        const afkTimerInput = document.getElementById('afkTimer');
+        if (afkTimerInput && afkTimerInput.value) {
+            const parsed = parseInt(afkTimerInput.value, 10);
+            if (!isNaN(parsed) && parsed > 0) afkTimerSeconds = parsed;
+        }
        if (timestamps.length >= 2) {
            for (let i = 1; i < timestamps.length; i++) {
                const gap = timestamps[i] - timestamps[i-1];