repr-cli 0.2.20-py3-none-any.whl → 0.2.22-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
repr/cli.py CHANGED
@@ -1031,7 +1031,11 @@ def generate(
 
         if not repo_commits:
             if not json_output:
-                console.print(f" No matching commits found")
+                if commits:
+                    console.print(f" [dim]No commits matching specified SHAs[/]")
+                else:
+                    filter_days_used = days if days else 90
+                    console.print(f" [dim]No commits in the last {filter_days_used} days[/]")
             continue
 
         # Filter out commits that are already part of existing stories (unless --force)
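The change above splits one generic message into two cases: explicit SHAs that matched nothing, and an empty time window (defaulting to 90 days when --days is not given). A minimal sketch of that selection logic, with hypothetical names standing in for the command's actual locals:

def empty_result_message(commits, days):
    # `commits` holds user-specified SHAs; `days` mirrors the --days option.
    if commits:
        return "No commits matching specified SHAs"
    window = days if days else 90  # assumed default lookback, per the diff
    return f"No commits in the last {window} days"
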
repr/openai_analysis.py CHANGED
@@ -240,7 +240,9 @@ No corporate fluff. No "enhanced", "improved", "robust". Just say what happened.
 
 {commits_formatted}"""
 
-    try:
+    async def make_request(use_temperature: bool = True):
+        """Make API request, optionally with temperature."""
+        temp_kwargs = {"temperature": EXTRACTION_TEMPERATURE} if use_temperature else {}
         if structured:
             # Use structured output with Pydantic model
             response = await client.beta.chat.completions.parse(
@@ -249,9 +251,9 @@ No corporate fluff. No "enhanced", "improved", "robust". Just say what happened.
                     {"role": "system", "content": system_prompt},
                     {"role": "user", "content": user_prompt},
                 ],
-                temperature=EXTRACTION_TEMPERATURE,
                 max_tokens=16000,
                 response_format=ExtractedCommitBatch,
+                **temp_kwargs,
             )
 
             parsed = response.choices[0].message.parsed
@@ -292,15 +294,23 @@ No corporate fluff. No "enhanced", "improved", "robust". Just say what happened.
                     {"role": "system", "content": system_prompt},
                     {"role": "user", "content": user_prompt},
                 ],
-                temperature=EXTRACTION_TEMPERATURE,
                 max_tokens=16000,
+                **temp_kwargs,
             )
-
+
             return response.choices[0].message.content or ""
+
+    try:
+        return await make_request(use_temperature=True)
     except Exception as e:
-        error_msg = str(e).lower()
+        error_msg = str(e)
+        error_lower = error_msg.lower()
+        # Handle models that don't support custom temperature
+        if "temperature" in error_lower and "unsupported" in error_lower:
+            # Retry without temperature parameter
+            return await make_request(use_temperature=False)
         # Handle content moderation blocks gracefully
-        if "blocked" in error_msg or "content" in error_msg or "moderation" in error_msg:
+        if "blocked" in error_lower or "content" in error_lower or "moderation" in error_lower:
             # Skip this batch but continue with others
             if structured:
                 return [
@@ -421,18 +431,29 @@ Synthesize this into a cohesive developer profile in Markdown format starting wi
 
 Focus on CONCRETE technical accomplishments AND the reasoning behind key decisions. For each major feature or system, explain WHY it was built that way—what problem it solved, what user need it addressed, or what technical constraint it navigated."""
 
-    response = await client.chat.completions.create(
-        model=model,
-        messages=[
-            {"role": "system", "content": system_prompt},
-            {"role": "user", "content": user_prompt},
-        ],
-        temperature=SYNTHESIS_TEMPERATURE,
-        max_tokens=16000,  # Increased for reasoning models
-    )
-
+    async def make_synthesis_request(use_temperature: bool = True):
+        temp_kwargs = {"temperature": SYNTHESIS_TEMPERATURE} if use_temperature else {}
+        return await client.chat.completions.create(
+            model=model,
+            messages=[
+                {"role": "system", "content": system_prompt},
+                {"role": "user", "content": user_prompt},
+            ],
+            max_tokens=16000,  # Increased for reasoning models
+            **temp_kwargs,
+        )
+
+    try:
+        response = await make_synthesis_request(use_temperature=True)
+    except Exception as e:
+        error_msg = str(e).lower()
+        if "temperature" in error_msg and "unsupported" in error_msg:
+            response = await make_synthesis_request(use_temperature=False)
+        else:
+            raise
+
     llm_content = response.choices[0].message.content or ""
-
+
     # Prepend metadata header
     return f"{metadata_header}\n\n---\n\n{llm_content}"
 
@@ -731,16 +752,27 @@ Preserve and highlight the "why" explanations that demonstrate engineering judgm
             progress=95.0,
         )
 
-    response = await client.chat.completions.create(
-        model=final_synthesis_model,
-        messages=[
-            {"role": "system", "content": system_prompt},
-            {"role": "user", "content": user_prompt},
-        ],
-        temperature=SYNTHESIS_TEMPERATURE,
-        max_tokens=16000,
-    )
-
+    async def make_final_request(use_temperature: bool = True):
+        temp_kwargs = {"temperature": SYNTHESIS_TEMPERATURE} if use_temperature else {}
+        return await client.chat.completions.create(
+            model=final_synthesis_model,
+            messages=[
+                {"role": "system", "content": system_prompt},
+                {"role": "user", "content": user_prompt},
+            ],
+            max_tokens=16000,
+            **temp_kwargs,
+        )
+
+    try:
+        response = await make_final_request(use_temperature=True)
+    except Exception as e:
+        error_msg = str(e).lower()
+        if "temperature" in error_msg and "unsupported" in error_msg:
+            response = await make_final_request(use_temperature=False)
+        else:
+            raise
+
     if progress_callback:
         progress_callback(
             step="Complete",
@@ -748,9 +780,9 @@ Preserve and highlight the "why" explanations that demonstrate engineering judgm
             repo="",
             progress=100.0,
         )
-
+
     llm_content = response.choices[0].message.content or ""
-
+
     # Prepend metadata header
     return f"{metadata_header}\n\n---\n\n{llm_content}"
 
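Taken together, the three openai_analysis.py changes apply one pattern: pass temperature through an optional kwargs dict, and retry the request without it when the API error says the parameter is unsupported (some models, such as OpenAI's reasoning models, accept only the default temperature). A condensed, self-contained sketch of that pattern; call_model, call_with_fallback, and the constant's value are illustrative, not the package's real names:

EXTRACTION_TEMPERATURE = 0.3  # assumed value, for illustration only

async def call_model(client, model, messages, use_temperature=True):
    # Include temperature only when the model is expected to accept it.
    temp_kwargs = {"temperature": EXTRACTION_TEMPERATURE} if use_temperature else {}
    response = await client.chat.completions.create(
        model=model,
        messages=messages,
        max_tokens=16000,
        **temp_kwargs,
    )
    return response.choices[0].message.content or ""

async def call_with_fallback(client, model, messages):
    try:
        return await call_model(client, model, messages, use_temperature=True)
    except Exception as e:
        # The diff matches on the API's error text mentioning temperature.
        error = str(e).lower()
        if "temperature" in error and "unsupported" in error:
            return await call_model(client, model, messages, use_temperature=False)
        raise  # unrelated errors still propagate, as in the synthesis variants
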
repr_cli-0.2.20.dist-info/METADATA → repr_cli-0.2.22.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: repr-cli
-Version: 0.2.20
+Version: 0.2.22
 Summary: A beautiful, privacy-first CLI that analyzes your code repositories and generates a compelling developer profile
 Author-email: Repr <hello@repr.dev>
 License: MIT License
repr_cli-0.2.20.dist-info/RECORD → repr_cli-0.2.22.dist-info/RECORD CHANGED
@@ -3,7 +3,7 @@ repr/__main__.py,sha256=N7amYwdGB3yzk2ZJJbtH2hhESNkDuhDL11dDEm5Kl60,166
 repr/api.py,sha256=rJRn_4xZXipdBFMrsZbQPWfZKfPLWJpTI0uYUyvjFhw,22814
 repr/auth.py,sha256=TpqwqwZ3tAEolcSYu-zD8oHhzfwHALkauPP1xg5VTiY,12208
 repr/change_synthesis.py,sha256=z7GmCeEHQFlnqLtKDGDvlM7p9MAWl_ByeIJstEVAhbU,15223
-repr/cli.py,sha256=peB96KMbYppZogPnzmSGvFld4HJGD55mXHg_9YBHyPs,225931
+repr/cli.py,sha256=Dy466jKddASU6OvCgtWbik4X3p-xIymQ1R2CzMBfedQ,226157
 repr/config.py,sha256=S69hdgFdvcHoIO2zihuvsSAQf2Gp41JtC5GGlE4Cy78,34233
 repr/configure.py,sha256=GnwjOC64F0uDD90IjA6LJNev8FlHHAHARuSLwBqI6k0,26860
 repr/cron.py,sha256=Hvo9ssVmGn09dLIHKWqzorKkW7eXdLQnQlBzagTX2Ko,11402
@@ -16,7 +16,7 @@ repr/keychain.py,sha256=CpKU3tjFZVEPgiHiplSAtBQFDPA6qOSovv4IXXgJXbY,6957
 repr/llm.py,sha256=inABX2kwEhPnON7sjCzcTMZZeCf0k3G08LyrKsi6Sko,14637
 repr/mcp_server.py,sha256=IhQM35bMD5c-6ASYIAa9VfnrvxzvFlZntUqkBm08Xqk,39752
 repr/models.py,sha256=mQAkP1bBiAFPweC0OxU-UwKNLZkinvVYHB0KjItHt3Q,20093
-repr/openai_analysis.py,sha256=FZY9pAlC2zdIjxxXxvZ2C3F65nqOp5OpXHS4Bu26jec,30527
+repr/openai_analysis.py,sha256=Pz3KMT5B91dcHOKPUQlFoMpTKlzjDO5idnmhkyAb6y8,31965
 repr/privacy.py,sha256=sN1tkoZjCDSwAjkQWeH6rHaLrtv727yT1HNHQ54GRis,9834
 repr/session_extractor.py,sha256=t1rEyhndjxMREt3gfmcGBYzFGEwzt1kAYbmXPq-QbU8,17104
 repr/storage.py,sha256=y_EYYKrUD2qNRKK2_vdjsIlPIq-IzfaNMUyj9aHofpQ,24223
@@ -50,9 +50,9 @@ repr/loaders/base.py,sha256=AE9lFr8ZvPYt6KDwBTkNv3JF5A2QakVn9gA_ha77GLU,4308
 repr/loaders/claude_code.py,sha256=sWAiQgNVWsdw9qUDcfHDBi5k6jBL7D8_SI3NAEsL-io,11106
 repr/loaders/clawdbot.py,sha256=daKfTjI16tZrlwGUNaVOnLwxKyV6eW102CgIOu4mwAw,12064
 repr/loaders/gemini_antigravity.py,sha256=_0HhtC1TwB2gSu20Bcco_W-V3Bt6v9O2iqOL6kIHQLU,13766
-repr_cli-0.2.20.dist-info/licenses/LICENSE,sha256=tI16Ry3IQhjsde6weJ_in6czzWW2EF4Chz1uicyDLAA,1061
-repr_cli-0.2.20.dist-info/METADATA,sha256=ncrhnw-aKNUpTryGzMykMWej-HqKrUSxsByhzADZx3E,13387
-repr_cli-0.2.20.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
-repr_cli-0.2.20.dist-info/entry_points.txt,sha256=dlI-TCeDTW2rBC_nvOvMhwLihU4qsgD5r4Ot5BuVqSw,56
-repr_cli-0.2.20.dist-info/top_level.txt,sha256=LNgPqdJPQnlicRve7uzI4a6rEUdcxHrNkUq_2w7eeiA,5
-repr_cli-0.2.20.dist-info/RECORD,,
+repr_cli-0.2.22.dist-info/licenses/LICENSE,sha256=tI16Ry3IQhjsde6weJ_in6czzWW2EF4Chz1uicyDLAA,1061
+repr_cli-0.2.22.dist-info/METADATA,sha256=hj5gXRgLB7yBxBKAs7L5K7HidSCwOVPH8EpWJ8CWSE0,13387
+repr_cli-0.2.22.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+repr_cli-0.2.22.dist-info/entry_points.txt,sha256=dlI-TCeDTW2rBC_nvOvMhwLihU4qsgD5r4Ot5BuVqSw,56
+repr_cli-0.2.22.dist-info/top_level.txt,sha256=LNgPqdJPQnlicRve7uzI4a6rEUdcxHrNkUq_2w7eeiA,5
+repr_cli-0.2.22.dist-info/RECORD,,
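
For reference, each RECORD row above has the form path,sha256=<digest>,<size>, where the digest is the file's SHA-256 encoded as URL-safe base64 with trailing "=" padding stripped. A small sketch for recomputing a row from an unpacked wheel, for example to confirm the new cli.py entry:

import base64
import hashlib
from pathlib import Path

def record_entry(path):
    # Rebuild one RECORD line: path, urlsafe-b64 sha256 (no padding), byte size.
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest())
    return f"{path},sha256={digest.rstrip(b'=').decode('ascii')},{len(data)}"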