repr-cli 0.2.22__tar.gz → 0.2.24__tar.gz
This diff shows the changes between two publicly released versions of the package, as published to one of the supported registries. The information is provided for informational purposes only and reflects the package versions as they appear in their respective public registries.
- {repr_cli-0.2.22 → repr_cli-0.2.24}/PKG-INFO +1 -1
- {repr_cli-0.2.22 → repr_cli-0.2.24}/pyproject.toml +1 -1
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/change_synthesis.py +39 -17
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/cli.py +60 -21
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/session_extractor.py +18 -7
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/story_synthesis.py +51 -19
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr_cli.egg-info/PKG-INFO +1 -1
- {repr_cli-0.2.22 → repr_cli-0.2.24}/LICENSE +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/README.md +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/__init__.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/__main__.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/api.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/auth.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/config.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/configure.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/cron.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/dashboard/__init__.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/dashboard/build.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/dashboard/dist/assets/index-B-aCjaCw.js +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/dashboard/dist/assets/index-BYFVbEev.css +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/dashboard/dist/assets/index-BrrhyJFO.css +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/dashboard/dist/assets/index-C7Gzxc4f.js +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/dashboard/dist/assets/index-CQdMXo6g.js +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/dashboard/dist/assets/index-CcEg74ts.js +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/dashboard/dist/assets/index-Cerc-iA_.js +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/dashboard/dist/assets/index-CjVcBW2L.css +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/dashboard/dist/assets/index-Cs8ofFGd.js +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/dashboard/dist/assets/index-Dfl3mR5E.js +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/dashboard/dist/assets/index-DwN0SeMc.css +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/dashboard/dist/assets/index-YFch_e0S.js +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/dashboard/dist/favicon.svg +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/dashboard/dist/index.html +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/dashboard/manager.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/dashboard/server.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/db.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/discovery.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/doctor.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/extractor.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/hooks.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/keychain.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/llm.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/loaders/__init__.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/loaders/base.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/loaders/claude_code.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/loaders/clawdbot.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/loaders/gemini_antigravity.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/mcp_server.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/models.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/openai_analysis.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/privacy.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/storage.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/telemetry.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/templates.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/timeline.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/tools.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/ui.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr/updater.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr_cli.egg-info/SOURCES.txt +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr_cli.egg-info/dependency_links.txt +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr_cli.egg-info/entry_points.txt +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr_cli.egg-info/requires.txt +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/repr_cli.egg-info/top_level.txt +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/setup.cfg +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/setup.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/tests/test_deduplication.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/tests/test_environment_variables.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/tests/test_network_sandboxing.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/tests/test_privacy_guarantees.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/tests/test_profile_export.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/tests/test_repo_identity.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/tests/test_stories_review.py +0 -0
- {repr_cli-0.2.22 → repr_cli-0.2.24}/tests/test_token_budget.py +0 -0
{repr_cli-0.2.22 → repr_cli-0.2.24}/pyproject.toml

```diff
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "repr-cli"
-version = "0.2.22"
+version = "0.2.24"
 description = "A beautiful, privacy-first CLI that analyzes your code repositories and generates a compelling developer profile"
 readme = "README.md"
 license = {file = "LICENSE"}
```
{repr_cli-0.2.22 → repr_cli-0.2.24}/repr/change_synthesis.py

```diff
@@ -418,14 +418,25 @@ async def explain_group(
         changes=changes_str,
     )
 
-
-
-
-
-
-
-
-
+    async def make_request(use_temperature: bool = True):
+        kwargs = {
+            "model": model,
+            "messages": [
+                {"role": "system", "content": GROUP_EXPLAIN_SYSTEM},
+                {"role": "user", "content": prompt},
+            ],
+        }
+        if use_temperature:
+            kwargs["temperature"] = 0.7
+        return await client.chat.completions.create(**kwargs)
+
+    try:
+        response = await make_request(use_temperature=True)
+    except Exception as e:
+        if "temperature" in str(e).lower() and "unsupported" in str(e).lower():
+            response = await make_request(use_temperature=False)
+        else:
+            raise
 
     return response.choices[0].message.content.strip()
 
```
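The retry-without-temperature pattern above repeats at every call site in this release. A minimal standalone sketch of the idea, assuming an OpenAI-style async client (the client, model, and messages below are illustrative placeholders, not code from the package):

```python
# Sketch: retry a chat completion without `temperature` when the model
# rejects the parameter. The error-string match mirrors the diff's check.
from openai import AsyncOpenAI

client = AsyncOpenAI()  # assumes OPENAI_API_KEY is set in the environment

async def chat_with_temperature_fallback(model: str, messages: list[dict], temperature: float):
    kwargs = {"model": model, "messages": messages, "temperature": temperature}
    try:
        return await client.chat.completions.create(**kwargs)
    except Exception as e:
        # Some models reject a non-default temperature; retry without it.
        if "temperature" in str(e).lower() and "unsupported" in str(e).lower():
            kwargs.pop("temperature")
            return await client.chat.completions.create(**kwargs)
        raise
```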
```diff
@@ -455,15 +466,26 @@ async def synthesize_changes(
         unpushed=format_commit_changes(report.unpushed),
     )
 
-
-
-
-
-
-
-
-
-
+    async def make_synthesis_request(use_temperature: bool = True):
+        kwargs = {
+            "model": model,
+            "messages": [
+                {"role": "system", "content": CHANGE_SYNTHESIS_SYSTEM},
+                {"role": "user", "content": prompt},
+            ],
+            "response_format": {"type": "json_object"},
+        }
+        if use_temperature:
+            kwargs["temperature"] = 0.7
+        return await client.chat.completions.create(**kwargs)
+
+    try:
+        response = await make_synthesis_request(use_temperature=True)
+    except Exception as e:
+        if "temperature" in str(e).lower() and "unsupported" in str(e).lower():
+            response = await make_synthesis_request(use_temperature=False)
+        else:
+            raise
 
     content = response.choices[0].message.content
     data = json.loads(content)
```
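The synthesis call requests strict JSON output via `response_format={"type": "json_object"}` and parses the reply with `json.loads`. A hedged sketch of that round trip (model name and prompt are illustrative assumptions):

```python
# Sketch: JSON-mode chat completion parsed with json.loads,
# mirroring the pattern the diff introduces. Model/prompt are placeholders.
import json
from openai import AsyncOpenAI

async def synthesize(prompt: str) -> dict:
    client = AsyncOpenAI()
    response = await client.chat.completions.create(
        model="gpt-4o-mini",
        # Note: JSON mode expects the prompt itself to mention JSON.
        messages=[{"role": "user", "content": prompt + " Respond in JSON."}],
        response_format={"type": "json_object"},  # model must emit valid JSON
    )
    return json.loads(response.choices[0].message.content)
```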
{repr_cli-0.2.22 → repr_cli-0.2.24}/repr/cli.py

```diff
@@ -6075,16 +6075,29 @@ def create_branch(
 
     prompt = COMMIT_MESSAGE_USER.format(changes=changes_str)
 
-
-
-        model
-        messages
+    async def make_branch_request(use_temperature: bool = True):
+        kwargs = {
+            "model": "gpt-4o-mini",
+            "messages": [
                 {"role": "system", "content": COMMIT_MESSAGE_SYSTEM},
                 {"role": "user", "content": prompt},
             ],
-        response_format
-
-
+            "response_format": {"type": "json_object"},
+        }
+        if use_temperature:
+            kwargs["temperature"] = 0.3
+        return await client.chat.completions.create(**kwargs)
+
+    async def get_branch_response():
+        try:
+            return await make_branch_request(use_temperature=True)
+        except Exception as e:
+            if "temperature" in str(e).lower() and "unsupported" in str(e).lower():
+                return await make_branch_request(use_temperature=False)
+            raise
+
+    with create_spinner("Generating branch name..."):
+        response = asyncio.run(get_branch_response())
     data = json.loads(response.choices[0].message.content)
     branch_name = data.get("branch", "")
     commit_msg = data.get("message", "")
```
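`create_branch` is a synchronous CLI command, so the rewritten code defines async helpers and bridges to them with `asyncio.run` inside the spinner context. A minimal sketch of that sync-to-async bridge (the spinner and helper names are placeholders):

```python
# Sketch: calling an async API helper from a synchronous CLI entry point,
# mirroring the asyncio.run(...) pattern in the diff. Names are placeholders.
import asyncio

async def fetch_branch_suggestion() -> str:
    await asyncio.sleep(0.1)  # stand-in for the chat completion call
    return "feat/example-branch"

def cli_command() -> None:
    # asyncio.run spins up and tears down an event loop per call,
    # which is acceptable for a one-shot CLI invocation.
    print(asyncio.run(fetch_branch_suggestion()))
```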
```diff
@@ -6227,16 +6240,29 @@ def commit_staged(
     changes_str = format_file_changes(staged)
     prompt = COMMIT_MESSAGE_USER.format(changes=changes_str)
 
-
-
-        model
-        messages
+    async def make_commit_request(use_temperature: bool = True):
+        kwargs = {
+            "model": "gpt-4o-mini",
+            "messages": [
                 {"role": "system", "content": COMMIT_MESSAGE_SYSTEM},
                 {"role": "user", "content": prompt},
             ],
-        response_format
-
-
+            "response_format": {"type": "json_object"},
+        }
+        if use_temperature:
+            kwargs["temperature"] = 0.3
+        return await client.chat.completions.create(**kwargs)
+
+    async def get_commit_response():
+        try:
+            return await make_commit_request(use_temperature=True)
+        except Exception as e:
+            if "temperature" in str(e).lower() and "unsupported" in str(e).lower():
+                return await make_commit_request(use_temperature=False)
+            raise
+
+    with create_spinner("Generating commit message..."):
+        response = asyncio.run(get_commit_response())
     data = json.loads(response.choices[0].message.content)
     branch_name = data.get("branch", "")
     commit_msg = data.get("message", "")
```
```diff
@@ -6665,16 +6691,29 @@ def create_pr(
 
     prompt = PR_USER.format(commits=commits_text)
 
-
-
-        model
-        messages
+    async def make_pr_request(use_temperature: bool = True):
+        kwargs = {
+            "model": "gpt-4o-mini",
+            "messages": [
                 {"role": "system", "content": PR_SYSTEM},
                 {"role": "user", "content": prompt},
             ],
-        response_format
-
-
+            "response_format": {"type": "json_object"},
+        }
+        if use_temperature:
+            kwargs["temperature"] = 0.3
+        return await client.chat.completions.create(**kwargs)
+
+    async def get_pr_response():
+        try:
+            return await make_pr_request(use_temperature=True)
+        except Exception as e:
+            if "temperature" in str(e).lower() and "unsupported" in str(e).lower():
+                return await make_pr_request(use_temperature=False)
+            raise
+
+    with create_spinner("Generating PR..."):
+        response = asyncio.run(get_pr_response())
     data = json.loads(response.choices[0].message.content)
     pr_title = data.get("title", current_branch)
     pr_body = data.get("body", "")
```
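After parsing, each call site falls back to safe defaults rather than failing on a missing key, e.g. `data.get("title", current_branch)`. A tiny illustration (the JSON payload is made up):

```python
# Sketch: defensive extraction with fallbacks, as the diff's call sites do.
import json

data = json.loads('{"title": "Add BYOK model resolution"}')  # illustrative payload
pr_title = data.get("title", "current-branch-name")  # falls back to branch name
pr_body = data.get("body", "")                       # empty string if absent
```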
{repr_cli-0.2.22 → repr_cli-0.2.24}/repr/session_extractor.py

```diff
@@ -319,18 +319,29 @@ class SessionExtractor:
         # Call LLM (using sync client to avoid event loop cleanup issues)
         client = self._get_client()
 
-
-
-            model
-            messages
+        def make_extraction_request(use_temperature: bool = True):
+            kwargs = {
+                "model": self.model.split("/")[-1] if "/" in self.model else self.model,
+                "messages": [
                     {"role": "system", "content": EXTRACTION_SYSTEM_PROMPT},
                     {"role": "user", "content": EXTRACTION_USER_PROMPT.format(
                         session_transcript=transcript
                     )},
                 ],
-            response_format
-
-
+                "response_format": {"type": "json_object"},
+            }
+            if use_temperature:
+                kwargs["temperature"] = 0.3
+            return client.chat.completions.create(**kwargs)
+
+        try:
+            try:
+                response = make_extraction_request(use_temperature=True)
+            except Exception as e:
+                if "temperature" in str(e).lower() and "unsupported" in str(e).lower():
+                    response = make_extraction_request(use_temperature=False)
+                else:
+                    raise
 
         # Parse response
         content = response.choices[0].message.content
```
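Both the extractor and the story synthesizer now normalize the configured model ID by stripping any provider prefix before the API call. A quick illustration of that one-liner (the prefixed ID is hypothetical):

```python
# Sketch: reduce a provider-prefixed model ID to the bare model name,
# matching `self.model.split("/")[-1] if "/" in self.model else self.model`.
def normalize_model(model: str) -> str:
    return model.split("/")[-1] if "/" in model else model

assert normalize_model("openai/gpt-4o-mini") == "gpt-4o-mini"  # hypothetical ID
assert normalize_model("gpt-4o-mini") == "gpt-4o-mini"         # already bare
```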
{repr_cli-0.2.22 → repr_cli-0.2.24}/repr/story_synthesis.py

```diff
@@ -559,10 +559,20 @@ class StorySynthesizer:
                 return llm_config["local_model"]
             elif default_mode == "cloud" and llm_config.get("cloud_model"):
                 return llm_config["cloud_model"]
-
-
-
-
+            elif default_mode.startswith("byok:"):
+                # BYOK mode - use the provider's configured model
+                provider = default_mode.split(":", 1)[1]
+                byok_config = llm_config.get("byok", {}).get(provider, {})
+                if byok_config.get("model"):
+                    return byok_config["model"]
+
+            # Fallback to BYOK model if configured
+            byok = llm_config.get("byok", {})
+            for provider_config in byok.values():
+                if provider_config.get("model"):
+                    return provider_config["model"]
+
+            # Fallback to cloud model
             if llm_config.get("cloud_model"):
                 return llm_config["cloud_model"]
         except Exception:
```
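The new BYOK ("bring your own key") branch resolves the model from a nested llm config keyed by provider. A sketch of a config shape consistent with these lookups (the schema is inferred from the diff, not documented by the package):

```python
# Sketch: an llm_config dict consistent with the new BYOK resolution.
# All keys and values are assumptions inferred from the lookups in the diff.
llm_config = {
    "default_mode": "byok:openai",       # other modes seen: "local", "cloud"
    "cloud_model": "gpt-4o-mini",        # hypothetical values
    "byok": {
        "openai": {"model": "gpt-4o-mini"},
    },
}

provider = llm_config["default_mode"].split(":", 1)[1]       # -> "openai"
model = llm_config["byok"].get(provider, {}).get("model")    # -> "gpt-4o-mini"
```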
```diff
@@ -648,18 +658,29 @@ class StorySynthesizer:
         client = self._get_client()
         commits_text = self._format_commits_for_prompt(commits)
 
-
-
-            model
-            messages
+        def make_request(use_temperature: bool = True):
+            kwargs = {
+                "model": self.model.split("/")[-1] if "/" in self.model else self.model,
+                "messages": [
                     {"role": "system", "content": STORY_SYNTHESIS_SYSTEM},
                     {"role": "user", "content": STORY_SYNTHESIS_USER.format(
                         commits_text=commits_text
                     )},
                 ],
-            response_format
-
-
+                "response_format": {"type": "json_object"},
+            }
+            if use_temperature:
+                kwargs["temperature"] = 0.3
+            return client.chat.completions.create(**kwargs)
+
+        try:
+            try:
+                response = make_request(use_temperature=True)
+            except Exception as e:
+                if "temperature" in str(e).lower() and "unsupported" in str(e).lower():
+                    response = make_request(use_temperature=False)
+                else:
+                    raise
 
         content = response.choices[0].message.content
 
```
```diff
@@ -1061,17 +1082,28 @@ async def transform_story_for_feed(
     )
     response_model = InternalStory
 
-
-
-
-
-        messages=[
+    def make_story_request(use_temperature: bool = True):
+        kwargs = {
+            "model": model_name.split("/")[-1] if "/" in model_name else model_name,
+            "messages": [
                 {"role": "system", "content": system_prompt},
                 {"role": "user", "content": user_prompt},
             ],
-        response_format
-
-
+            "response_format": {"type": "json_object"},
+        }
+        if use_temperature:
+            kwargs["temperature"] = 0.7
+        return client.chat.completions.create(**kwargs)
+
+    try:
+        # Use sync client to avoid event loop cleanup issues
+        try:
+            response = make_story_request(use_temperature=True)
+        except Exception as e:
+            if "temperature" in str(e).lower() and "unsupported" in str(e).lower():
+                response = make_story_request(use_temperature=False)
+            else:
+                raise
 
     content = response.choices[0].message.content.strip()
 
```