GameSentenceMiner 2.17.5__py3-none-any.whl → 2.17.6__py3-none-any.whl
This diff compares publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registries.
- GameSentenceMiner/ai/ai_prompting.py +257 -120
- {gamesentenceminer-2.17.5.dist-info → gamesentenceminer-2.17.6.dist-info}/METADATA +1 -1
- {gamesentenceminer-2.17.5.dist-info → gamesentenceminer-2.17.6.dist-info}/RECORD +7 -7
- {gamesentenceminer-2.17.5.dist-info → gamesentenceminer-2.17.6.dist-info}/WHEEL +0 -0
- {gamesentenceminer-2.17.5.dist-info → gamesentenceminer-2.17.6.dist-info}/entry_points.txt +0 -0
- {gamesentenceminer-2.17.5.dist-info → gamesentenceminer-2.17.6.dist-info}/licenses/LICENSE +0 -0
- {gamesentenceminer-2.17.5.dist-info → gamesentenceminer-2.17.6.dist-info}/top_level.txt +0 -0
GameSentenceMiner/ai/ai_prompting.py

@@ -44,11 +44,13 @@ Provide a very brief summary of the scene in {get_config().general.get_native_la
 Current Sentence:
 """)

+
 class AIType(Enum):
     GEMINI = "Gemini"
     GROQ = "Groq"
     OPENAI = "OpenAI"

+
 @dataclass
 class AIConfig:
     api_key: str
@@ -56,16 +58,19 @@ class AIConfig:
     api_url: Optional[str]
     type: 'AIType'

+
 @dataclass
 class GeminiAIConfig(AIConfig):
     def __init__(self, api_key: str, model: str = "gemini-2.0-flash"):
         super().__init__(api_key=api_key, model=model, api_url=None, type=AIType.GEMINI)

+
 @dataclass
 class GroqAiConfig(AIConfig):
     def __init__(self, api_key: str, model: str = "meta-llama/llama-4-scout-17b-16e-instruct"):
         super().__init__(api_key=api_key, model=model, api_url=None, type=AIType.GROQ)

+
 @dataclass
 class OpenAIAIConfig(AIConfig):
     def __init__(self, api_key: str, model: str = "openai/gpt-oss-20b", api_url: Optional[str] = None):
@@ -88,8 +93,10 @@ class AIManager(ABC):
             start_index = 0
             end_index = len(lines)
         else:
-            start_index = max(0, current_line.index -
-
+            start_index = max(0, current_line.index -
+                              get_config().ai.dialogue_context_length)
+            end_index = min(len(lines), current_line.index +
+                            1 + get_config().ai.dialogue_context_length)

         context_lines_text = []
         for i in range(start_index, end_index):
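The reflowed computation above clips a symmetric window of get_config().ai.dialogue_context_length lines on either side of the current line before the prompt is built. A minimal sketch of the same slicing, with the config lookup replaced by a plain integer argument (the function name and list-of-strings signature are illustrative, not GSM's):

from typing import List


def context_window(lines: List[str], current_index: int, dialogue_context_length: int) -> List[str]:
    # Clamp both ends to the list bounds so short transcripts never slice out of range.
    start_index = max(0, current_index - dialogue_context_length)
    end_index = min(len(lines), current_index + 1 + dialogue_context_length)
    return lines[start_index:end_index]


# Two lines of context on each side of line 5 -> lines 3 through 7.
print(context_window([f"line {i}" for i in range(10)], 5, 2))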
@@ -129,24 +136,28 @@ class AIManager(ABC):
 class OpenAIManager(AIManager):
     def __init__(self, model, api_url, api_key, logger: Optional[logging.Logger] = None):
         super().__init__(OpenAIAIConfig(api_key=api_key, model=model, api_url=api_url), logger)
+        self.extra_params_allowed = True
         try:
             import openai
             self.client = openai.OpenAI(
-
-
-
+                base_url=api_url,
+                api_key=api_key
+            )
             self.model_name = model
             if MANUAL_MODEL_OVERRIDE:
                 self.model_name = MANUAL_MODEL_OVERRIDE
-                self.logger.warning(
-
+                self.logger.warning(
+                    f"MANUAL MODEL OVERRIDE ENABLED! Using model: {self.model_name}")
+            self.logger.debug(
+                f"OpenAIManager initialized with model: {self.model_name}")
         except Exception as e:
             self.logger.error(f"Failed to initialize OpenAI API: {e}")
             self.openai = None
             self.model_name = None
-
+
     def _build_prompt(self, lines: List[GameLine], sentence: str, current_line: GameLine, game_title: str, custom_prompt=None) -> str:
-        prompt = super()._build_prompt(lines, sentence, current_line,
+        prompt = super()._build_prompt(lines, sentence, current_line,
+                                       game_title, custom_prompt=custom_prompt)
         return prompt

     def process(self, lines: List[GameLine], sentence: str, current_line: GameLine, game_title: str = "", custom_prompt=None) -> str:
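The constructor above now passes api_url straight through to openai.OpenAI(base_url=...), which is what lets one manager class serve both api.openai.com and OpenAI-compatible local servers. A two-line illustration, reusing the LM Studio endpoint and placeholder key that appear in this file's commented-out test code:

import openai

# Point the standard client at a local OpenAI-compatible server; endpoint and key are placeholders.
client = openai.OpenAI(base_url="http://127.0.0.1:1234/v1", api_key="lm-studio")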
@@ -154,29 +165,56 @@ class OpenAIManager(AIManager):
             return "Processing failed: OpenAI client not initialized."

         if not lines or not current_line:
-            self.logger.warning(
+            self.logger.warning(
+                f"Invalid input for process: lines={len(lines)}, current_line={current_line.index}")
             return "Invalid input."

+        if any(model in self.model_name.lower() for model in ['gpt-5']):
+            self.logger.warning("GPT-5 model detected, using basic parameters.")
+            self.extra_params_allowed = False
+        else:
+            self.extra_params_allowed = True
+
         try:
-            prompt = self._build_prompt(
+            prompt = self._build_prompt(
+                lines, sentence, current_line, game_title, custom_prompt=custom_prompt)
             self.logger.debug(f"Generated prompt:\n{prompt}")
-
-
-
-
-
-
-
-
-
-
-
-
+            # Try with full parameters first, fallback to basic parameters if model doesn't support them
+            if self.extra_params_allowed:
+                try:
+                    response = self.client.chat.completions.create(
+                        model=self.model_name,
+                        messages=[
+                            {"role": "system", "content": "You are a helpful assistant that translates game dialogue. Provide output in the form of json with a single key 'output'."},
+                            {"role": "user", "content": prompt}
+                        ],
+                        temperature=0.3,
+                        max_tokens=4096,
+                        top_p=0.9,
+                        n=1,
+                        stop=None,
+                    )
+                except Exception as e:
+                    self.extra_params_allowed = False
+                    self.logger.warning(
+                        f"Full parameter request failed, trying with basic parameters: {e}")
+
+            if not self.extra_params_allowed:
+                response = self.client.chat.completions.create(
+                    model=self.model_name,
+                    messages=[
+                        {"role": "system", "content": "You are a helpful assistant that translates game dialogue. Provide output in the form of json with a single key 'output'."},
+                        {"role": "user", "content": prompt}
+                    ],
+                    n=1,
+                )
+
             if response.choices and response.choices[0].message.content:
                 text_output = response.choices[0].message.content.strip()
                 # get the json at the end of the message
                 if "{" in text_output and "}" in text_output:
-                    json_output = text_output[text_output.find(
+                    json_output = text_output[text_output.find(
+                        "{"):text_output.rfind("}")+1]
                     text_output = json.loads(json_output)['output']
                 self.logger.debug(f"Received response:\n{text_output}")
                 return text_output
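The rewritten process() above sends the request with the full sampling parameters first and, if that call raises (or the model was already flagged as a gpt-5 variant), retries with a bare-bones parameter set, remembering the outcome in extra_params_allowed. A standalone sketch of that fallback pattern against the openai client; the model name and messages are placeholders, and the real method additionally parses a JSON 'output' field from the reply:

import openai

client = openai.OpenAI()  # reads OPENAI_API_KEY from the environment
model_name = "gpt-4o-mini"  # placeholder model
messages = [
    {"role": "system", "content": "You are a helpful assistant that translates game dialogue."},
    {"role": "user", "content": "..."},
]

extra_params_allowed = True
response = None
if extra_params_allowed:
    try:
        # Full request: sampling parameters that some models reject outright.
        response = client.chat.completions.create(
            model=model_name, messages=messages,
            temperature=0.3, max_tokens=4096, top_p=0.9, n=1,
        )
    except Exception as exc:
        extra_params_allowed = False  # remembered so the next call skips the failing attempt
        print(f"Full parameter request failed, retrying with basic parameters: {exc}")
if not extra_params_allowed:
    # Basic request: only arguments every chat-completions endpoint accepts.
    response = client.chat.completions.create(model=model_name, messages=messages, n=1)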
@@ -193,7 +231,8 @@ class GeminiAI(AIManager):
             self.model_name = model
             if MANUAL_MODEL_OVERRIDE:
                 self.model_name = MANUAL_MODEL_OVERRIDE
-                self.logger.warning(
+                self.logger.warning(
+                    f"MANUAL MODEL OVERRIDE ENABLED! Using model: {self.model_name}")
             # genai.configure(api_key=self.ai_config.api_key)
             self.generation_config = types.GenerateContentConfig(
                 temperature=0.5,
@@ -201,23 +240,29 @@ class GeminiAI(AIManager):
                 top_p=0.9,
                 stop_sequences=None,
                 safety_settings=[
-                    types.SafetySetting(category=types.HarmCategory.HARM_CATEGORY_HARASSMENT,
-
-                    types.SafetySetting(category=types.HarmCategory.
-
+                    types.SafetySetting(category=types.HarmCategory.HARM_CATEGORY_HARASSMENT,
+                                        threshold=types.HarmBlockThreshold.BLOCK_NONE),
+                    types.SafetySetting(category=types.HarmCategory.HARM_CATEGORY_HATE_SPEECH,
+                                        threshold=types.HarmBlockThreshold.BLOCK_NONE),
+                    types.SafetySetting(category=types.HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT,
+                                        threshold=types.HarmBlockThreshold.BLOCK_NONE),
+                    types.SafetySetting(category=types.HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT,
+                                        threshold=types.HarmBlockThreshold.BLOCK_NONE),
                 ],
             )
             if "2.5" in self.model_name:
                 self.generation_config.thinking_config = types.ThinkingConfig(
-
-
-                self.logger.debug(
+                    thinking_budget=-1
+                )
+            self.logger.debug(
+                f"GeminiAIManager initialized with model: {self.model_name}")
         except Exception as e:
             self.logger.error(f"Failed to initialize Gemini API: {e}")
             self.model_name = None

     def _build_prompt(self, lines: List[GameLine], sentence: str, current_line: GameLine, game_title: str, custom_prompt=None) -> str:
-        prompt = super()._build_prompt(lines, sentence, current_line,
+        prompt = super()._build_prompt(lines, sentence, current_line,
+                                       game_title, custom_prompt=custom_prompt)
         return prompt

     def process(self, lines: List[GameLine], sentence: str, current_line: GameLine, game_title: str = "", custom_prompt=None) -> str:
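The reflowed constructor above lists one SafetySetting per harm category, all at BLOCK_NONE, and for 2.5-series models enables dynamic thinking with thinking_budget=-1. A condensed sketch of that configuration using the same google-genai types; the model name is a placeholder and the loop over categories is just a compaction of the four explicit settings in the diff:

from google.genai import types

model_name = "gemini-2.5-flash"  # placeholder
generation_config = types.GenerateContentConfig(
    temperature=0.5,
    top_p=0.9,
    safety_settings=[
        # One setting per category, each set to BLOCK_NONE as in the diff.
        types.SafetySetting(category=category, threshold=types.HarmBlockThreshold.BLOCK_NONE)
        for category in (
            types.HarmCategory.HARM_CATEGORY_HARASSMENT,
            types.HarmCategory.HARM_CATEGORY_HATE_SPEECH,
            types.HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT,
            types.HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT,
        )
    ],
)
if "2.5" in model_name:
    # thinking_budget=-1 lets the 2.5-series model choose its own thinking budget.
    generation_config.thinking_config = types.ThinkingConfig(thinking_budget=-1)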
@@ -225,11 +270,13 @@ class GeminiAI(AIManager):
             return "Processing failed: AI model not initialized."

         if not lines or not current_line:
-            self.logger.warning(
+            self.logger.warning(
+                f"Invalid input for process: lines={len(lines)}, current_line={current_line.index}")
             return "Invalid input."

         try:
-            prompt = self._build_prompt(
+            prompt = self._build_prompt(
+                lines, sentence, current_line, game_title, custom_prompt=custom_prompt)
             contents = [
                 types.Content(
                     role="user",
@@ -252,20 +299,22 @@ class GeminiAI(AIManager):
             self.logger.error(f"Gemini processing failed: {e}")
             return f"Processing failed: {e}"

+
 class GroqAI(AIManager):
     def __init__(self, model, api_key, logger: Optional[logging.Logger] = None):
         super().__init__(GroqAiConfig(model=model, api_key=api_key), logger)
         self.api_key = self.ai_config.api_key
-        self.model_name = self.ai_config.model
         try:
             self.client = Groq(api_key=self.api_key)
-            self.logger.debug(
+            self.logger.debug(
+                f"GroqAIManager initialized with model: {self.model_name}")
         except Exception as e:
             self.logger.error(f"Failed to initialize Groq client: {e}")
             self.client = None

     def _build_prompt(self, lines: List[GameLine], sentence: str, current_line: GameLine, game_title: str, custom_prompt=None) -> str:
-        prompt = super()._build_prompt(lines, sentence, current_line,
+        prompt = super()._build_prompt(lines, sentence, current_line,
+                                       game_title, custom_prompt=custom_prompt)
         return prompt

     def process(self, lines: List[GameLine], sentence: str, current_line: GameLine, game_title: str = "", custom_prompt=None) -> str:
@@ -273,11 +322,13 @@ class GroqAI(AIManager):
             return "Processing failed: Groq client not initialized."

         if not lines or not current_line:
-            self.logger.warning(
+            self.logger.warning(
+                f"Invalid input for process: lines={len(lines)}, current_line={current_line.index}")
             return "Invalid input."

         try:
-            prompt = self._build_prompt(
+            prompt = self._build_prompt(
+                lines, sentence, current_line, game_title, custom_prompt=custom_prompt)
             self.logger.debug(f"Generated prompt:\n{prompt}")
             completion = self.client.chat.completions.create(
                 model=self.model_name,
@@ -295,16 +346,19 @@ class GroqAI(AIManager):
             self.logger.error(f"Groq processing failed: {e}")
             return f"Processing failed: {e}"

+
 ai_managers: dict[str, AIManager] = {}
 ai_manager: AIManager | None = None
 current_ai_config: Ai | None = None

+
 def get_ai_prompt_result(lines: List[GameLine], sentence: str, current_line: GameLine, game_title: str = "", force_refresh: bool = False, custom_prompt=None) -> str:
     global ai_manager, current_ai_config
     try:
         is_local_provider = get_config().ai.provider == AIType.OPENAI.value
         if not is_local_provider and not is_connected():
-            logger.error(
+            logger.error(
+                "No internet connection. Unable to proceed with AI prompt.")
             return ""

         if not ai_manager or ai_config_changed(get_config().ai, current_ai_config) or force_refresh:
@@ -312,21 +366,27 @@ def get_ai_prompt_result(lines: List[GameLine], sentence: str, current_line: Gam
             if provider == AIType.GEMINI.value:
                 if get_config().ai.gemini_model in ai_managers:
                     ai_manager = ai_managers[get_config().ai.gemini_model]
-                    logger.info(
+                    logger.info(
+                        f"Reusing existing Gemini AI Manager for model: {get_config().ai.gemini_model}")
                 else:
-                    ai_manager = GeminiAI(model=get_config(
+                    ai_manager = GeminiAI(model=get_config(
+                    ).ai.gemini_model, api_key=get_config().ai.gemini_api_key, logger=logger)
             elif provider == AIType.GROQ.value:
                 if get_config().ai.groq_model in ai_managers:
                     ai_manager = ai_managers[get_config().ai.groq_model]
-                    logger.info(
+                    logger.info(
+                        f"Reusing existing Groq AI Manager for model: {get_config().ai.groq_model}")
                 else:
-                    ai_manager = GroqAI(model=get_config(
+                    ai_manager = GroqAI(model=get_config(
+                    ).ai.groq_model, api_key=get_config().ai.groq_api_key, logger=logger)
             elif provider == AIType.OPENAI.value:
-                if get_config().ai.open_ai_model in ai_managers:
-                    ai_manager = ai_managers[get_config().ai.open_ai_model]
-                    logger.info(
+                if f"{get_config().ai.open_ai_url}:{get_config().ai.open_ai_model}:{get_config().ai.open_ai_api_key}" in ai_managers:
+                    ai_manager = ai_managers[f"{get_config().ai.open_ai_url}:{get_config().ai.open_ai_model}:{get_config().ai.open_ai_api_key}"]
+                    logger.info(
+                        f"Reusing existing OpenAI AI Manager for model: {get_config().ai.open_ai_model}")
                 else:
-                    ai_manager = OpenAIManager(model=get_config().ai.open_ai_model, api_key=get_config(
+                    ai_manager = OpenAIManager(model=get_config().ai.open_ai_model, api_key=get_config(
+                    ).ai.open_ai_api_key, api_url=get_config().ai.open_ai_url, logger=logger)
             else:
                 ai_manager = None
             if ai_manager:
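For the OpenAI provider, manager reuse is now keyed on the full url:model:api_key triple instead of the model name alone, so switching between endpoints (for example a local LM Studio server and api.openai.com) with the same model name no longer returns a stale client. A toy sketch of that cache-key scheme, with OpenAIManager stubbed out:

managers: dict[str, object] = {}


def get_manager(url: str, model: str, api_key: str) -> object:
    key = f"{url}:{model}:{api_key}"
    if key not in managers:
        managers[key] = object()  # stand-in for OpenAIManager(model=model, api_key=api_key, api_url=url)
    return managers[key]


a = get_manager("http://127.0.0.1:1234/v1", "google/gemma-3n-e4b", "lm-studio")
b = get_manager("https://api.openai.com/v1", "google/gemma-3n-e4b", "sk-placeholder")
assert a is not b  # same model name, different endpoint/key -> different managers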
@@ -334,14 +394,17 @@ def get_ai_prompt_result(lines: List[GameLine], sentence: str, current_line: Gam
             current_ai_config = get_config().ai

         if not ai_manager:
-            logger.error(
+            logger.error(
+                "AI is enabled but the AI Manager did not initialize. Check your AI Config IN GSM.")
             return ""
         return ai_manager.process(lines, sentence, current_line, game_title, custom_prompt=custom_prompt)
     except Exception as e:
-        logger.error(
+        logger.error(
+            "Error caught while trying to get AI prompt result. Check logs for more details.")
         logger.debug(e, exc_info=True)
         return ""

+
 def ai_config_changed(config, current):
     if not current:
         return True
@@ -370,83 +433,156 @@ if __name__ == '__main__':
     logging.basicConfig(level=logging.DEBUG)
     lines = [
         # Sexual/Explicit Japanese words and phrases
-        GameLine(index=0, text="ねぇ、あたしのおっぱい、揉んでみない?",
-
-        GameLine(index=
-
+        GameLine(index=0, text="ねぇ、あたしのおっぱい、揉んでみない?",
+                 id=None, time=None, prev=None, next=None),
+        GameLine(index=1, text="お前、本当に痴女だな。股が開いてるぜ。",
+                 id=None, time=None, prev=None, next=None),
+        GameLine(index=2, text="今夜は熱い夜にしましょうね…ふふ。",
+                 id=None, time=None, prev=None, next=None),
+        GameLine(index=3, text="あぁ…もっと奥まで…ダメ…イッちゃう…!",
+                 id=None, time=None, prev=None, next=None),
         GameLine(index=4, text="あんたみたいなやつ、生きてる価値ないわ。さっさと自害しろ。", id=None, time=None, prev=None,
                  next=None),
-        GameLine(index=5, text="このブス!誰がお前なんかを相手にするかよ。",
-
-        GameLine(index=
-
-        GameLine(index=
-
-        GameLine(index=
-
-        GameLine(index=
-
-
-
-
-
-        GameLine(index=
-        next=None),
-        GameLine(index=
-        next=None),
-        GameLine(index=3, text="くっ…!調子に乗るなよ…!", id=None, time=None, prev=None, next=None),
-        GameLine(index=4, text="あんたみたいなやつ、生きてる価値ないわ。さっさと自害しろ。", id=None, time=None, prev=None,
-        next=None),
-        GameLine(index=5, text="この能無しが!誰がお前なんかを相手にするかよ。", id=None, time=None, prev=None,
-        next=None),
-        GameLine(index=6, text="黙れ。これ以上喋るなら、その舌を引っこ抜いてやる。", id=None, time=None, prev=None,
-        next=None),
-        GameLine(index=7, text="次会ったら、ぶっ殺してやるからな。", id=None, time=None, prev=None, next=None),
-        GameLine(index=8, text="はっ、望むところだ。返り討ちにしてやる。", id=None, time=None, prev=None, next=None),
-        GameLine(index=9, text="お前の顔も見たくない。地獄に落ちろ。", id=None, time=None, prev=None, next=None),
-        GameLine(index=10, text="自害しろ", id=None, time=None, prev=None, next=None),
-        GameLine(index=11, text="この臆病者が!逃げることしか能がないのか?!", id=None, time=None, prev=None, next=None),
-        GameLine(index=12, text="俺の拳で黙らせてやるよ。", id=None, time=None, prev=None, next=None),
-        GameLine(index=13, text="くそっ…覚えてろよ…!このままじゃ終わらせない…!", id=None, time=None, prev=None,
-        next=None),
+        GameLine(index=5, text="このブス!誰がお前なんかを相手にするかよ。",
+                 id=None, time=None, prev=None, next=None),
+        GameLine(index=6, text="こんにちは、元気ですか?", id=None,
+                 time=None, prev=None, next=None),
+        GameLine(index=7, text="次会ったら、ぶっ殺してやるからな。",
+                 id=None, time=None, prev=None, next=None),
+        GameLine(index=8, text="今日はいい天気ですね。", id=None,
+                 time=None, prev=None, next=None),
+        GameLine(index=9, text="お前の体、隅々まで味わい尽くしてやる。",
+                 id=None, time=None, prev=None, next=None),
+        GameLine(index=10, text="自害しろ", id=None,
+                 time=None, prev=None, next=None),
+        GameLine(index=11, text="この売女!金のために魂まで売るのか?!",
+                 id=None, time=None, prev=None, next=None),
+        GameLine(index=12, text="俺の股間のモノで黙らせてやるよ。",
+                 id=None, time=None, prev=None, next=None),
+        GameLine(index=13, text="くっ…イク…頭が…おかしくなりそう…!",
+                 id=None, time=None, prev=None, next=None),
     ]

-
-
+    # lines = [
+    #     # A back-and-forth dialogue of insults and threats
+    #     GameLine(index=0, text="お前、ここで何をしている?目障りだ。",
+    #              id=None, time=None, prev=None, next=None),
+    #     GameLine(index=1, text="それはこっちのセリフだ。さっさと消えろ、クズが。", id=None, time=None, prev=None,
+    #              next=None),
+    #     GameLine(index=2, text="口だけは達者だな。やれるもんならやってみろよ。", id=None, time=None, prev=None,
+    #              next=None),
+    #     GameLine(index=3, text="くっ…!調子に乗るなよ…!", id=None,
+    #              time=None, prev=None, next=None),
+    #     GameLine(index=4, text="あんたみたいなやつ、生きてる価値ないわ。さっさと自害しろ。", id=None, time=None, prev=None,
+    #              next=None),
+    #     GameLine(index=5, text="この能無しが!誰がお前なんかを相手にするかよ。", id=None, time=None, prev=None,
+    #              next=None),
+    #     GameLine(index=6, text="黙れ。これ以上喋るなら、その舌を引っこ抜いてやる。", id=None, time=None, prev=None,
+    #              next=None),
+    #     GameLine(index=7, text="次会ったら、ぶっ殺してやるからな。",
+    #              id=None, time=None, prev=None, next=None),
+    #     GameLine(index=8, text="はっ、望むところだ。返り討ちにしてやる。",
+    #              id=None, time=None, prev=None, next=None),
+    #     GameLine(index=9, text="お前の顔も見たくない。地獄に落ちろ。",
+    #              id=None, time=None, prev=None, next=None),
+    #     GameLine(index=10, text="自害しろ", id=None,
+    #              time=None, prev=None, next=None),
+    #     GameLine(index=11, text="この臆病者が!逃げることしか能がないのか?!",
+    #              id=None, time=None, prev=None, next=None),
+    #     GameLine(index=12, text="俺の拳で黙らせてやるよ。", id=None,
+    #              time=None, prev=None, next=None),
+    #     GameLine(index=13, text="くそっ…覚えてろよ…!このままじゃ終わらせない…!", id=None, time=None, prev=None,
+    #              next=None),
+    # ]
+
+    # Completely neutral Japanese sentences
+    #
+    # lines = [
+    #     GameLine(index=0, text="今日はいい天気ですね。", id=None,
+    #              time=None, prev=None, next=None),
+    #     GameLine(index=1, text="おはようございます。", id=None,
+    #              time=None, prev=None, next=None),
+    #     GameLine(index=2, text="お元気ですか?", id=None,
+    #              time=None, prev=None, next=None),
+    #     GameLine(index=3, text="これはペンです。", id=None,
+    #              time=None, prev=None, next=None),
+    #     GameLine(index=4, text="私は学生です。", id=None,
+    #              time=None, prev=None, next=None),
+    #     GameLine(index=5, text="東京は日本の首都です。", id=None,
+    #              time=None, prev=None, next=None),
+    #     GameLine(index=6, text="こんにちは、元気ですか?", id=None,
+    #              time=None, prev=None, next=None),
+    #     GameLine(index=7, text="さようなら。また会いましょう。", id=None,
+    #              time=None, prev=None, next=None),
+    #     GameLine(index=8, text="ありがとう。助かりました。", id=None,
+    #              time=None, prev=None, next=None),
+    #     GameLine(index=9, text="すみません、道に迷いました。", id=None,
+    #              time=None, prev=None, next=None),
+    #     GameLine(index=10, text="これは本です。", id=None,
+    #              time=None, prev=None, next=None),
+    #     GameLine(index=11, text="私は先生です。", id=None,
+    #              time=None, prev=None, next=None),
+    # ]
+
+    sentence = "あぁ…もっと奥まで…ダメ…イッちゃう…!"
+    current_line = lines[3]
     game_title = "Corrupted Reality"

-    get_config().ai.provider = AIType.OPENAI.value
-    models = [
-        # 'openai/gpt-oss-20b',
-        # 'meta-llama-3.1-8b-instruct',
-        'google/gemma-3n-e4b',
-        # 'google/gemma-2-2b-it',
-        # 'google/gemma-2b-it',
-        # 'facebook/nllb-200-distilled-600M',
-        # 'meta-llama/Llama-3.2-1B-Instruct',
-        # 'facebook/nllb-200-1.3B'
-    ]
-
     results = []

+    # get_config().ai.provider = AIType.GEMINI.value
+    # models = [
+    #     'gemini-2.5-pro']
+
     # for model in models:
-    #     get_config().ai.
+    #     get_config().ai.gemini_model = model
     #     start_time = time.time()
     #     result = get_ai_prompt_result(lines, sentence, current_line, game_title, True)
-    #     results.append({"model": model,"response": result, "time": time.time() - start_time, "iteration": 1})
-
-
-
-    get_config().ai.
-
-    for i in range(
-
-
-
-
-
-
-
+    #     results.append({"model": model, "response": result, "time": time.time() - start_time, "iteration": 1})
+
+    get_config().ai.provider = AIType.OPENAI.value
+    get_config().ai.open_ai_url = "https://api.openai.com/v1"
+    get_config().ai.open_ai_model = "gpt-5-nano-2025-08-07"
+
+    for i in range(5):
+        start_time = time.time()
+        result = get_ai_prompt_result(
+            lines, sentence, current_line, game_title, True)
+        results.append({"model": get_config().ai.open_ai_model,
+                        "response": result, "time": time.time() - start_time, "iteration": i})
+
+    # get_config().ai.provider = AIType.OPENAI.value
+    # models = [
+    #     # 'openai/gpt-oss-20b',
+    #     # 'meta-llama-3.1-8b-instruct',
+    #     'google/gemma-3n-e4b',
+    #     # 'google/gemma-2-2b-it',
+    #     # 'google/gemma-2b-it',
+    #     # 'facebook/nllb-200-distilled-600M',
+    #     # 'meta-llama/Llama-3.2-1B-Instruct',
+    #     # 'facebook/nllb-200-1.3B'
+    # ]
+
+    # results = []
+
+    # # for model in models:
+    # #     get_config().ai.local_model = model
+    # #     start_time = time.time()
+    # #     result = get_ai_prompt_result(lines, sentence, current_line, game_title, True)
+    # #     results.append({"model": model,"response": result, "time": time.time() - start_time, "iteration": 1})
+
+    # # Second Time after Already Loaded
+
+    # get_config().ai.open_ai_url = "http://127.0.0.1:1234/v1"
+    # get_config().ai.open_ai_api_key = "lm-studio"
+    # for i in range(1, 10):
+    #     for model in models:
+    #         get_config().ai.open_ai_model = model
+    #         start_time = time.time()
+    #         result = get_ai_prompt_result(lines, sentence, current_line, game_title, True)
+    #         print(result)
+    #         results.append({"model": model, "response": result, "time": time.time() - start_time, "iteration": i})
+    #         results[model] = {"response": result, "time": time.time() - start_time}

     # get_config().ai.provider = "Gemini"
     #
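The rewritten test harness above times five consecutive calls against one configured model and stores per-run results; the next hunk prints each response and the average latency. A stripped-down sketch of that timing loop with the network call stubbed out:

import time


def get_ai_prompt_result_stub(*args, **kwargs) -> str:
    time.sleep(0.1)  # stand-in for the real request made by get_ai_prompt_result
    return "translated line"


results = []
model = "gpt-5-nano-2025-08-07"  # model name taken from the hunk above
for i in range(5):
    start_time = time.time()
    result = get_ai_prompt_result_stub()
    results.append({"model": model, "response": result,
                    "time": time.time() - start_time, "iteration": i})

times = [r["time"] for r in results]
print(f"Average time: {sum(times)/len(times):.2f} seconds over {len(times)} runs.")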
@@ -464,10 +600,11 @@ if __name__ == '__main__':
     times = []
     for result in results:
         times.append(result['time'])
-        print(
+        print(
+            f"Model: {result['model']}\nResult: {result['response']}\nTime: {result['time']:.2f} seconds\n{'-'*80}\n")

-    print(
+    print(
+        f"Average time: {sum(times)/len(times):.2f} seconds over {len(times)} runs.")
     # Set up logging

     # Test the function
-
{gamesentenceminer-2.17.5.dist-info → gamesentenceminer-2.17.6.dist-info}/RECORD

@@ -6,7 +6,7 @@ GameSentenceMiner/gsm.py,sha256=JZUj8PkwHW6RPzJ-rvpOYMY7F-TO7EsHRWSzlZqPE8w,3406
 GameSentenceMiner/obs.py,sha256=Xa4-SFm2Ftlnk6KX_XaDWjhSEPW_7rSDRe9WDNW8JLY,29377
 GameSentenceMiner/vad.py,sha256=cujto_lzAuljldBBnve4JuK54tInWVGSp7NGg-xkaCY,19830
 GameSentenceMiner/ai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-GameSentenceMiner/ai/ai_prompting.py,sha256=
+GameSentenceMiner/ai/ai_prompting.py,sha256=eKFzvIIeoav6MjQJtrhsNvSfHa566f_ZBZRk4Qy9Lhw,29192
 GameSentenceMiner/assets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 GameSentenceMiner/assets/icon.png,sha256=9GRL8uXUAgkUSlvbm9Pv9o2poFVRGdW6s2ub_DeUD9M,937624
 GameSentenceMiner/assets/icon128.png,sha256=l90j7biwdz5ahwOd5wZ-406ryEV9Pan93dquJQ3e1CI,18395
@@ -90,9 +90,9 @@ GameSentenceMiner/web/templates/utility.html,sha256=KtqnZUMAYs5XsEdC9Tlsd40NKAVi
 GameSentenceMiner/web/templates/components/navigation.html,sha256=6y9PvM3nh8LY6JWrZb6zVOm0vqkBLDc6d3gB9X5lT_w,1055
 GameSentenceMiner/web/templates/components/theme-styles.html,sha256=hiq3zdJljpRjQO1iUA7gfFKwXebltG-IWW-gnKS4GHA,3439
 GameSentenceMiner/wip/__init___.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-gamesentenceminer-2.17.
-gamesentenceminer-2.17.
-gamesentenceminer-2.17.
-gamesentenceminer-2.17.
-gamesentenceminer-2.17.
-gamesentenceminer-2.17.
+gamesentenceminer-2.17.6.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+gamesentenceminer-2.17.6.dist-info/METADATA,sha256=VU7guWSOX-LaQM9ggLNVDsVUDlseVOzuAaYJYOUXFHo,7348
+gamesentenceminer-2.17.6.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+gamesentenceminer-2.17.6.dist-info/entry_points.txt,sha256=2APEP25DbfjSxGeHtwBstMH8mulVhLkqF_b9bqzU6vQ,65
+gamesentenceminer-2.17.6.dist-info/top_level.txt,sha256=V1hUY6xVSyUEohb0uDoN4UIE6rUZ_JYx8yMyPGX4PgQ,18
+gamesentenceminer-2.17.6.dist-info/RECORD,,
WHEEL, entry_points.txt, licenses/LICENSE, top_level.txt: files without changes.