GameSentenceMiner 2.13.15__py3-none-any.whl → 2.14.0rc1__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.
- GameSentenceMiner/ai/ai_prompting.py +77 -132
- GameSentenceMiner/anki.py +48 -6
- GameSentenceMiner/config_gui.py +196 -30
- GameSentenceMiner/gametext.py +8 -19
- GameSentenceMiner/gsm.py +5 -4
- GameSentenceMiner/locales/en_us.json +21 -11
- GameSentenceMiner/locales/ja_jp.json +21 -11
- GameSentenceMiner/locales/zh_cn.json +9 -11
- GameSentenceMiner/owocr/owocr/ocr.py +20 -23
- GameSentenceMiner/tools/__init__.py +0 -0
- GameSentenceMiner/util/configuration.py +241 -105
- GameSentenceMiner/util/db.py +408 -0
- GameSentenceMiner/util/ffmpeg.py +2 -10
- GameSentenceMiner/util/get_overlay_coords.py +324 -0
- GameSentenceMiner/util/model.py +8 -2
- GameSentenceMiner/util/text_log.py +1 -1
- GameSentenceMiner/web/texthooking_page.py +1 -1
- GameSentenceMiner/wip/__init___.py +0 -0
- {gamesentenceminer-2.13.15.dist-info → gamesentenceminer-2.14.0rc1.dist-info}/METADATA +5 -1
- {gamesentenceminer-2.13.15.dist-info → gamesentenceminer-2.14.0rc1.dist-info}/RECORD +27 -25
- GameSentenceMiner/util/package.py +0 -37
- GameSentenceMiner/wip/get_overlay_coords.py +0 -535
- /GameSentenceMiner/{util → tools}/audio_offset_selector.py +0 -0
- /GameSentenceMiner/{util → tools}/ss_selector.py +0 -0
- /GameSentenceMiner/{util → tools}/window_transparency.py +0 -0
- {gamesentenceminer-2.13.15.dist-info → gamesentenceminer-2.14.0rc1.dist-info}/WHEEL +0 -0
- {gamesentenceminer-2.13.15.dist-info → gamesentenceminer-2.14.0rc1.dist-info}/entry_points.txt +0 -0
- {gamesentenceminer-2.13.15.dist-info → gamesentenceminer-2.14.0rc1.dist-info}/licenses/LICENSE +0 -0
- {gamesentenceminer-2.13.15.dist-info → gamesentenceminer-2.14.0rc1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,408 @@
+
+
+import json
+import os
+import sqlite3
+from sys import platform
+import time
+from typing import Any, Dict, List, Optional, Tuple, Union, Type, TypeVar
+import threading
+
+from GameSentenceMiner.util.text_log import GameLine
+from GameSentenceMiner.util.configuration import logger, is_dev
+
+
+class SQLiteDB:
+    """
+    Multi-purpose SQLite database utility class for general use.
+    Thread-safe for basic operations.
+    """
+
+    def __init__(self, db_path: str):
+        self.db_path = db_path
+        self._lock = threading.Lock()
+
+    def execute(self, query: str, params: Union[Tuple, Dict] = (), commit: bool = False) -> sqlite3.Cursor:
+        with self._lock, sqlite3.connect(self.db_path, check_same_thread=False) as conn:
+            if is_dev:
+                logger.debug(f"Executed query: {query} with params: {params}")
+            cur = conn.cursor()
+            cur.execute(query, params)
+            if commit:
+                conn.commit()
+            return cur
+
+    def executemany(self, query: str, seq_of_params: List[Union[Tuple, Dict]], commit: bool = False) -> sqlite3.Cursor:
+        with self._lock, sqlite3.connect(self.db_path, check_same_thread=False) as conn:
+            cur = conn.cursor()
+            cur.executemany(query, seq_of_params)
+            if commit:
+                conn.commit()
+            return cur
+
+    def fetchall(self, query: str, params: Union[Tuple, Dict] = ()) -> List[Tuple]:
+        cur = self.execute(query, params)
+        return cur.fetchall()
+
+    def fetchone(self, query: str, params: Union[Tuple, Dict] = ()) -> Optional[Tuple]:
+        cur = self.execute(query, params)
+        return cur.fetchone()
+
+    def create_table(self, table_sql: str):
+        self.execute(table_sql, commit=True)
+
+    def table_exists(self, table: str) -> bool:
+        result = self.fetchone(
+            "SELECT name FROM sqlite_master WHERE type='table' AND name=?", (table,))
+        return result is not None
+
+    def close(self):
+        pass
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.close()
+
+
+# Abstract base for table-mapped classes
+T = TypeVar('T', bound='SQLiteDBTable')
+
+
+class SQLiteDBTable:
+    _db: SQLiteDB = None
+    _table: str = ''
+    _fields: List[str] = []
+    _types: List[type] = []
+    _pk: str = 'id'
+    _auto_increment: bool = True
+
+    def __init_subclass__(cls, **kwargs):
+        super().__init_subclass__(**kwargs)
+        if not hasattr(cls, '_table') or not cls._table:
+            cls._table = cls.__name__.lower()
+        if not hasattr(cls, '_fields') or not cls._fields:
+            raise NotImplementedError(f"{cls.__name__} must define _fields")
+
+    @classmethod
+    def set_db(cls, db: SQLiteDB):
+        cls._db = db
+        # Ensure table exists
+        if not db.table_exists(cls._table):
+            fields_def = ', '.join([f"{field} TEXT" for field in cls._fields])
+            pk_def = f"{cls._pk} TEXT PRIMARY KEY" if not cls._auto_increment else f"{cls._pk} INTEGER PRIMARY KEY AUTOINCREMENT"
+            create_table_sql = f"CREATE TABLE IF NOT EXISTS {cls._table} ({pk_def}, {fields_def})"
+            db.create_table(create_table_sql)
+
+    @classmethod
+    def all(cls: Type[T]) -> List[T]:
+        rows = cls._db.fetchall(f"SELECT * FROM {cls._table}")
+        return [cls.from_row(row) for row in rows]
+
+    @classmethod
+    def get(cls: Type[T], pk_value: Any) -> Optional[T]:
+        row = cls._db.fetchone(
+            f"SELECT * FROM {cls._table} WHERE {cls._pk}=?", (pk_value,))
+        return cls.from_row(row) if row else None
+
+    @classmethod
+    def one(cls: Type[T]) -> Optional[T]:
+        row = cls._db.fetchone(f"SELECT * FROM {cls._table} LIMIT 1")
+        return cls.from_row(row) if row else None
+
+    @classmethod
+    def from_row(cls: Type[T], row: Tuple) -> T:
+        if not row:
+            return None
+        obj = cls()
+        fields = [cls._pk] + cls._fields
+        for i, field in enumerate(fields):
+            if i == 0 and field == cls._pk:
+                if cls._types[i] == int:
+                    setattr(obj, field, int(row[i])
+                            if row[i] is not None else None)
+                elif cls._types[i] == str:
+                    setattr(obj, field, str(row[i])
+                            if row[i] is not None else None)
+                continue
+            if cls._types[i] == str:
+                if (row[i].startswith('[') or row[i].startswith('{')):
+                    try:
+                        setattr(obj, field, json.loads(row[i]))
+                    except json.JSONDecodeError:
+                        setattr(obj, field, row[i])
+                else:
+                    setattr(obj, field, str(row[i])
+                            if row[i] is not None else None)
+            elif cls._types[i] == list:
+                try:
+                    setattr(obj, field, json.loads(row[i]) if row[i] else [])
+                except json.JSONDecodeError:
+                    setattr(obj, field, [])
+            elif cls._types[i] == int:
+                setattr(obj, field, int(row[i])
+                        if row[i] is not None else None)
+            elif cls._types[i] == float:
+                setattr(obj, field, float(row[i])
+                        if row[i] is not None else None)
+            elif cls._types[i] == bool:
+                setattr(obj, field, bool(row[i])
+                        if row[i] is not None else None)
+            elif cls._types[i] == dict:
+                try:
+                    setattr(obj, field, json.loads(row[i]) if row[i] else {})
+                except json.JSONDecodeError:
+                    setattr(obj, field, {})
+            else:
+                setattr(obj, field, row[i])
+        return obj
+
+    def save(self, retry=1):
+        try:
+            for field in self._fields:
+                if isinstance(getattr(self, field), list):
+                    setattr(self, field, json.dumps(getattr(self, field)))
+            data = {field: getattr(self, field) for field in self._fields}
+            pk_val = getattr(self, self._pk, None)
+            if pk_val is None:
+                # Insert
+                keys = ', '.join(data.keys())
+                placeholders = ', '.join(['?'] * len(data))
+                values = tuple(data.values())
+                query = f"INSERT INTO {self._table} ({keys}) VALUES ({placeholders})"
+                cur = self._db.execute(query, values, commit=True)
+                setattr(self, self._pk, cur.lastrowid)
+                logger.debug(f"Inserted into {self._table} id={cur.lastrowid}")
+            else:
+                # Update
+                set_clause = ', '.join([f"{k}=?" for k in data.keys()])
+                values = tuple(data.values())
+                query = f"UPDATE {self._table} SET {set_clause} WHERE {self._pk}=?"
+                self._db.execute(query, values + (pk_val,), commit=True)
+                logger.debug(f"Updated {self._table} id={pk_val}")
+        except sqlite3.OperationalError as e:
+            if retry <= 0:
+                logger.error(f"Failed to save record to {self._table}: {e}")
+                return
+            if "no column named" in str(e):
+                new_column = str(e).split("no column named ")[1].strip()
+                logger.info(f"Adding missing column {new_column} to {self._table}")
+                # Get type of new column from self._types by matching column name in _fields
+                if new_column in self._fields:
+                    self.add_column(new_column)
+                    self.save(retry=retry - 1)  # Retry after adding column
+
+    def add(self, retry=1):
+        try:
+            pk_val = getattr(self, self._pk, None)
+            if cls._auto_increment:
+                self.save()
+            elif pk_val is None:
+                raise ValueError(
+                    f"Primary key {self._pk} must be set for non-auto-increment tables.")
+            else:
+                keys = ', '.join(self._fields + [self._pk])
+                placeholders = ', '.join(['?'] * (len(self._fields) + 1))
+                values = tuple(getattr(self, field)
+                               for field in self._fields) + (pk_val,)
+                query = f"INSERT INTO {self._table} ({keys}) VALUES ({placeholders})"
+                self._db.execute(query, values, commit=True)
+        except sqlite3.OperationalError as e:
+            if retry <= 0:
+                logger.error(f"Failed to add record to {self._table}: {e}")
+                return
+            if "no column named" in str(e):
+                new_column = str(e).split("no column named ")[1].strip()
+                logger.info(f"Adding missing column {new_column} to {self._table}")
+                # Get type of new column from self._types by matching column name in _fields
+                if new_column in self._fields:
+                    self.add_column(new_column)
+                    self.add(retry=retry - 1)  # Retry after adding column
+
+    def add_column(self, column_name: str, new_column_type: str = "TEXT"):
+        try:
+            index = self._fields.index(column_name) + 1
+            self._db.execute(
+                f"ALTER TABLE {self._table} ADD COLUMN {column_name} {new_column_type}", commit=True)
+            logger.info(f"Added column {column_name} to {self._table}")
+        except sqlite3.OperationalError as e:
+            if "duplicate column name" in str(e):
+                logger.warning(
+                    f"Column {column_name} already exists in {self._table}.")
+            else:
+                logger.error(
+                    f"Failed to add column {column_name} to {self._table}: {e}")
+
+    def delete(self):
+        pk_val = getattr(self, self._pk, None)
+        if pk_val is not None:
+            query = f"DELETE FROM {self._table} WHERE {self._pk}=?"
+            self._db.execute(query, (pk_val,), commit=True)
+
+    def print(self):
+        pk_val = getattr(self, self._pk, None)
+        logger.info(f"{self._table} Record (id={pk_val}): " +
+                    ', '.join([f"{field}={getattr(self, field)}" for field in self._fields]))
+
+    @classmethod
+    def drop(cls):
+        cls._db.execute(f"DROP TABLE IF EXISTS {cls._table}", commit=True)
+
+
+class AIModelsTable(SQLiteDBTable):
+    _table = 'ai_models'
+    _fields = ['gemini_models', 'groq_models', 'last_updated']
+    _types = [int,  # Includes primary key type
+              list, list, float]
+    _pk = 'id'
+
+    def __init__(self, id: Optional[int] = None, gemini_models: list = None, groq_models: list = None, last_updated: Optional[float] = None):
+        self.id = id
+        self.gemini_models = gemini_models if gemini_models is not None else []
+        self.groq_models = groq_models if groq_models is not None else []
+        self.last_updated = last_updated
+
+    @classmethod
+    def get_gemini_models(cls) -> List[str]:
+        rows = cls.all()
+        return rows[0].gemini_models if rows else []
+
+    @classmethod
+    def get_groq_models(cls) -> List[str]:
+        rows = cls.all()
+        return rows[0].groq_models if rows else []
+
+    @classmethod
+    def update_models(cls, gemini_models: List[str], groq_models: List[str]):
+        models = cls.one()
+        if not models:
+            new_model = cls(gemini_models=gemini_models,
+                            groq_models=groq_models, last_updated=time.time())
+            new_model.save()
+            return
+        if models.gemini_models:
+            models.gemini_models = gemini_models
+        if models.groq_models:
+            models.groq_models = groq_models
+        models.last_updated = time.time()
+        models.save()
+
+    @classmethod
+    def set_gemini_models(cls, models: List[str]):
+        models = cls.all()
+        if not models:
+            new_model = cls(gemini_models=models,
+                            groq_models=[], last_updated=time.time())
+            new_model.save()
+            return
+        for model in models:
+            model.gemini_models = models
+            model.last_updated = time.time()
+            model.save()
+
+    @classmethod
+    def set_groq_models(cls, models: List[str]):
+        models = cls.all()
+        if not models:
+            new_model = cls(gemini_models=[], groq_models=models,
+                            last_updated=time.time())
+            new_model.save()
+            return
+        for model in models:
+            model.groq_models = models
+            model.last_updated = time.time()
+            model.save()
+
+
+class GameLinesTable(SQLiteDBTable):
+    _table = 'game_lines'
+    _fields = ['game_name', 'line_text', 'timestamp', 'screenshot_in_anki',
+               'audio_in_anki', 'screenshot_path', 'audio_path', 'replay_path', 'translation']
+    _types = [str,  # Includes primary key type
+              str, str, str, str, str, str, str, str, str]
+    _pk = 'id'
+    _auto_increment = False  # Use string IDs
+
+    def __init__(self, id: Optional[str] = None,
+                 game_name: Optional[str] = None,
+                 line_text: Optional[str] = None,
+                 context: Optional[str] = None,
+                 timestamp: Optional[float] = None,
+                 screenshot_in_anki: Optional[str] = None,
+                 audio_in_anki: Optional[str] = None,
+                 screenshot_path: Optional[str] = None,
+                 audio_path: Optional[str] = None,
+                 replay_path: Optional[str] = None,
+                 translation: Optional[str] = None):
+        self.id = id
+        self.game_name = game_name
+        self.line_text = line_text
+        self.context = context
+        self.timestamp = timestamp if timestamp is not None else time.time()
+        self.screenshot_in_anki = screenshot_in_anki if screenshot_in_anki is not None else ''
+        self.audio_in_anki = audio_in_anki if audio_in_anki is not None else ''
+        self.screenshot_path = screenshot_path if screenshot_path is not None else ''
+        self.audio_path = audio_path if audio_path is not None else ''
+        self.replay_path = replay_path if replay_path is not None else ''
+        self.translation = translation if translation is not None else ''
+
+    @classmethod
+    def get_all_lines_for_scene(cls, game_name: str) -> List['GameLinesTable']:
+        rows = cls._db.fetchall(
+            f"SELECT * FROM {cls._table} WHERE game_name=?", (game_name,))
+        return [cls.from_row(row) for row in rows]
+
+    @classmethod
+    def get_all_games_with_lines(cls) -> List[str]:
+        rows = cls._db.fetchall(f"SELECT DISTINCT game_name FROM {cls._table}")
+        return [row[0] for row in rows if row[0] is not None]
+
+    @classmethod
+    def update(cls, line_id: str, audio_in_anki: Optional[str] = None, screenshot_in_anki: Optional[str] = None, audio_path: Optional[str] = None, screenshot_path: Optional[str] = None, replay_path: Optional[str] = None, translation: Optional[str] = None):
+        line = cls.get(line_id)
+        if not line:
+            logger.warning(f"GameLine with id {line_id} not found for update.")
+            return
+        if screenshot_path is not None:
+            line.screenshot_path = screenshot_path
+        if audio_path is not None:
+            line.audio_path = audio_path
+        if replay_path is not None:
+            line.replay_path = replay_path
+        if screenshot_in_anki is not None:
+            line.screenshot_in_anki = screenshot_in_anki
+        if audio_in_anki is not None:
+            line.audio_in_anki = audio_in_anki
+        if translation is not None:
+            line.translation = translation
+        line.save()
+        logger.debug(f"Updated GameLine id={line_id} paths.")
+
+    @classmethod
+    def add_line(cls, gameline: GameLine):
+        new_line = cls(id=gameline.id, game_name=gameline.scene,
+                       line_text=gameline.text, timestamp=gameline.time.timestamp())
+        # logger.info("Adding GameLine to DB: %s", new_line)
+        new_line.add()
+        return new_line
+
+
+def get_db_directory():
+    if platform == 'win32':  # Windows
+        appdata_dir = os.getenv('APPDATA')
+    else:  # macOS and Linux
+        appdata_dir = os.path.expanduser('~/.config')
+    config_dir = os.path.join(appdata_dir, 'GameSentenceMiner')
+    # Create the directory if it doesn't exist
+    os.makedirs(config_dir, exist_ok=True)
+    return os.path.join(config_dir, 'gsm.db')


+gsm_db = SQLiteDB(get_db_directory())
+
+for cls in [AIModelsTable, GameLinesTable]:
+    cls.set_db(gsm_db)
+    # Uncomment to start fresh every time
+    # cls.drop()
+    # cls.set_db(gsm_db)  # --- IGNORE ---
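For orientation, the following is a minimal sketch of how the table-mapping base class added in GameSentenceMiner/util/db.py appears to be used. The NotesTable class, its fields, and the example.db path are hypothetical illustrations, not part of the package; only SQLiteDB, SQLiteDBTable, and their methods come from the diff above.

    from GameSentenceMiner.util.db import SQLiteDB, SQLiteDBTable

    class NotesTable(SQLiteDBTable):      # hypothetical table, for illustration only
        _table = 'notes'
        _fields = ['title', 'tags']
        _types = [int,                    # primary key type comes first
                  str, list]              # one entry per field; list fields are stored as JSON
        _pk = 'id'

        def __init__(self, id=None, title=None, tags=None):
            self.id = id
            self.title = title
            self.tags = tags if tags is not None else []

    db = SQLiteDB('example.db')           # hypothetical path; GSM itself opens gsm.db in its config dir
    NotesTable.set_db(db)                 # creates the 'notes' table if it does not exist

    note = NotesTable(title='hello', tags=['demo'])
    note.save()                           # INSERT; the auto-increment id is filled in from lastrowid
    print(NotesTable.get(note.id).title)  # reads the row back and maps it onto an object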
GameSentenceMiner/util/ffmpeg.py
CHANGED
@@ -39,16 +39,11 @@ def call_frame_extractor(video_path, timestamp):
         str: The path of the selected image, or None on error.
     """
     try:
-
-        current_dir = os.path.dirname(os.path.abspath(__file__))
-        # Construct the path to the frame extractor script
-        script_path = os.path.join(current_dir, "ss_selector.py")  # Replace with the actual script name if different
-
-        logger.info(' '.join([sys.executable, "-m", "GameSentenceMiner.util.ss_selector", video_path, str(timestamp)]))
+        logger.info(' '.join([sys.executable, "-m", "GameSentenceMiner.tools.ss_selector", video_path, str(timestamp)]))

         # Run the script using subprocess.run()
         result = subprocess.run(
-            [sys.executable, "-m", "GameSentenceMiner.
+            [sys.executable, "-m", "GameSentenceMiner.tools.ss_selector", video_path, str(timestamp), get_config().screenshot.screenshot_timing_setting],  # Use sys.executable
             capture_output=True,
             text=True,  # Get output as text
             check=False  # Raise an exception for non-zero exit codes
@@ -65,9 +60,6 @@ def call_frame_extractor(video_path, timestamp):
         logger.error(f"Error calling script: {e}")
         logger.error(f"Script output (stderr): {e.stderr.strip()}")
         return None
-    except FileNotFoundError:
-        logger.error(f"Error: Script not found at {script_path}. Make sure the script name is correct.")
-        return None
     except Exception as e:
         logger.error(f"An unexpected error occurred: {e}")
         return None
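In other words, the frame extractor is no longer located via a file path next to ffmpeg.py; it is invoked as a module from the relocated GameSentenceMiner.tools package. The equivalent manual invocation would look roughly like the sketch below; the video path, timestamp, and timing value are placeholders, with the last argument corresponding to get_config().screenshot.screenshot_timing_setting.

    python -m GameSentenceMiner.tools.ss_selector <video_path> <timestamp> <screenshot_timing_setting>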