jarvis-ai-assistant 0.3.1__py3-none-any.whl → 0.3.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
jarvis/__init__.py CHANGED
@@ -1,4 +1,4 @@
 # -*- coding: utf-8 -*-
 """Jarvis AI Assistant"""
 
-__version__ = "0.3.1"
+__version__ = "0.3.2"
@@ -19,17 +19,17 @@ def run_cli(
     ctx: typer.Context,
     llm_type: str = typer.Option(
         "normal",
-        "--llm_type",
+        "-t", "--llm_type",
         help="使用的LLM类型,可选值:'normal'(普通)或 'thinking'(思考模式)",
     ),
     task: Optional[str] = typer.Option(
-        None, "-t", "--task", help="从命令行直接输入任务内容"
+        None, "-T", "--task", help="从命令行直接输入任务内容"
     ),
     model_group: Optional[str] = typer.Option(
-        None, "--llm_group", help="使用的模型组,覆盖配置文件中的设置"
+        None, "-g", "--llm_group", help="使用的模型组,覆盖配置文件中的设置"
     ),
     tool_group: Optional[str] = typer.Option(
-        None, "--tool_group", help="使用的工具组,覆盖配置文件中的设置"
+        None, "-G", "--tool_group", help="使用的工具组,覆盖配置文件中的设置"
     ),
     config_file: Optional[str] = typer.Option(
         None, "-f", "--config", help="自定义配置文件路径"
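This hunk (and the similar ones below) leans on Typer's convention that typer.Option takes the default value first, followed by any number of flag declarations, so one option can answer to both a short and a long flag. A minimal, self-contained sketch of that pattern (illustrative only, not code from the package):

from typing import Optional

import typer

app = typer.Typer()

@app.command()
def run(
    llm_type: str = typer.Option(
        "normal", "-t", "--llm_type", help="LLM type: 'normal' or 'thinking'"
    ),
    task: Optional[str] = typer.Option(None, "-T", "--task", help="Task text"),
) -> None:
    # Both `run -t thinking` and `run --llm_type thinking` set the same parameter.
    typer.echo(f"llm_type={llm_type!r} task={task!r}")

if __name__ == "__main__":
    app()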
@@ -50,13 +50,12 @@ def cli(
     ),
     llm_type: str = typer.Option(
         "normal",
-        "--llm_type",
+        "-t", "--llm_type",
         help="使用的LLM类型,覆盖配置文件中的设置",
     ),
     model_group: Optional[str] = typer.Option(
-        None, "--llm_group", help="使用的模型组,覆盖配置文件中的设置"
-    ),
-):
+        None, "-g", "--llm_group", help="使用的模型组,覆盖配置文件中的设置"
+    ),):
     """Main entry point for Jarvis agent"""
     # Initialize environment
     init_env("欢迎使用 Jarvis AI 助手,您的智能助理已准备就绪!", config_file=config_file)
@@ -60,6 +60,28 @@ class ShareManager(ABC):
             subprocess.run(
                 ["git", "clone", self.central_repo_url, self.repo_path], check=True
             )
+            # 检查并添加.gitignore文件
+            gitignore_path = os.path.join(self.repo_path, ".gitignore")
+            modified = False
+            if not os.path.exists(gitignore_path):
+                with open(gitignore_path, "w") as f:
+                    f.write("__pycache__/\n")
+                modified = True
+            else:
+                with open(gitignore_path, "r+") as f:
+                    content = f.read()
+                    if "__pycache__" not in content:
+                        f.write("\n__pycache__/\n")
+                        modified = True
+
+            if modified:
+                subprocess.run(["git", "add", ".gitignore"], cwd=self.repo_path, check=True)
+                subprocess.run(
+                    ["git", "commit", "-m", "chore: add __pycache__ to .gitignore"],
+                    cwd=self.repo_path,
+                    check=True
+                )
+                subprocess.run(["git", "push"], cwd=self.repo_path, check=True)
         else:
             PrettyOutput.print(
                 f"正在更新中心{self.get_resource_type()}仓库...", OutputType.INFO
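The .gitignore branch above relies on a detail of open(..., "r+"): after read() the file offset sits at end-of-file, so the following write() appends rather than overwriting. A stripped-down sketch of the same ensure-entry step without the git plumbing (the helper name is hypothetical):

import os

def ensure_gitignore_entry(repo_path: str, entry: str = "__pycache__/") -> bool:
    """Return True if .gitignore was created or modified."""
    gitignore_path = os.path.join(repo_path, ".gitignore")
    if not os.path.exists(gitignore_path):
        with open(gitignore_path, "w") as f:
            f.write(entry + "\n")
        return True
    with open(gitignore_path, "r+") as f:
        content = f.read()  # offset is now at end-of-file
        if "__pycache__" in content:
            return False
        f.write("\n" + entry + "\n")  # appends because of the EOF offset
    return True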
@@ -642,11 +642,11 @@ class CodeAgent:
 def cli(
     llm_type: str = typer.Option(
         "normal",
-        "--llm_type",
+        "-t", "--llm_type",
         help="使用的LLM类型,可选值:'normal'(普通)或 'thinking'(思考模式)",
     ),
     model_group: Optional[str] = typer.Option(
-        None, "--llm_group", help="使用的模型组,覆盖配置文件中的设置"
+        None, "-g", "--llm_group", help="使用的模型组,覆盖配置文件中的设置"
     ),
     requirement: Optional[str] = typer.Option(
         None, "-r", "--requirement", help="要处理的需求描述"
@@ -8,6 +8,12 @@ import sys
 from typing import Any, Dict, List, Optional
 
 import typer
+from jarvis.jarvis_utils.config import (
+    get_normal_platform_name,
+    get_normal_model_name,
+    get_thinking_platform_name,
+    get_thinking_model_name,
+)
 
 from jarvis.jarvis_platform.registry import PlatformRegistry
 from jarvis.jarvis_utils.input import get_multiline_input, get_single_line_input
@@ -19,22 +25,26 @@ app = typer.Typer(help="Jarvis AI 平台")
 
 
 @app.command("info")
-def list_platforms() -> None:
-    """列出所有支持的平台和模型。"""
+def list_platforms(
+    platform: Optional[str] = typer.Option(
+        None, "--platform", "-p", help="指定要查看的平台"
+    )
+) -> None:
+    """列出所有支持的平台和模型,或指定平台的详细信息。"""
     registry = PlatformRegistry.get_global_platform_registry()
-    platforms = registry.get_available_platforms()
+    platform_names = [platform] if platform else registry.get_available_platforms()
 
     PrettyOutput.section("Supported platforms and models", OutputType.SUCCESS)
 
-    for platform_name in platforms:
+    for platform_name in platform_names:
         try:
             # Create platform instance
-            platform = registry.create_platform(platform_name)
-            if not platform:
+            platform_instance = registry.create_platform(platform_name)
+            if not platform_instance:
                 continue
 
             # Get the list of models supported by the platform
-            models = platform.get_model_list()
+            models = platform_instance.get_model_list()
 
             # Print platform name
             PrettyOutput.section(f"{platform_name}", OutputType.SUCCESS)
@@ -55,13 +65,25 @@ def list_platforms() -> None:
             PrettyOutput.print(f"创建 {platform_name} 平台失败", OutputType.WARNING)
 
 
-def chat_with_model(platform_name: str, model_name: str, system_prompt: str) -> None:
-    """与指定平台和模型进行对话。"""
+def chat_with_model(
+    platform_name: str, model_name: str, system_prompt: str, llm_type: str = "normal"
+) -> None:
+    """与指定平台和模型进行对话。
+
+    参数:
+        platform_name: 平台名称
+        model_name: 模型名称
+        system_prompt: 系统提示语
+        llm_type: LLM类型,可选值:'normal'(普通)或 'thinking'(思考模式)
+    """
     registry = PlatformRegistry.get_global_platform_registry()
     conversation_history: List[Dict[str, str]] = []  # 存储对话记录
 
     # Create platform instance
     platform = registry.create_platform(platform_name)
+    if platform:
+        platform.set_model_name(model_name)
+
     if not platform:
         PrettyOutput.print(f"创建平台 {platform_name} 失败", OutputType.WARNING)
         return
@@ -100,7 +122,7 @@ def chat_with_model(platform_name: str, model_name: str, system_prompt: str) ->
         # Check if it is a clear session command
         if user_input.strip() == "/clear":
             try:
-                platform.reset()
+                platform.reset()  # type: ignore[no-untyped-call] # type: ignore[no-untyped-call] # type: ignore[no-untyped-call]
                 platform.set_model_name(model_name)  # Reinitialize session
                 conversation_history = []  # 重置对话记录
                 PrettyOutput.print("会话已清除", OutputType.SUCCESS)
@@ -332,11 +354,32 @@ def chat_command(
         None, "--platform", "-p", help="指定要使用的平台"
     ),
     model: Optional[str] = typer.Option(None, "--model", "-m", help="指定要使用的模型"),
+    llm_type: str = typer.Option(
+        "normal",
+        "-t",
+        "--llm_type",
+        help="使用的LLM类型,可选值:'normal'(普通)或 'thinking'(思考模式)",
+    ),
+    llm_group: Optional[str] = typer.Option(
+        None, "-g", "--llm_group", help="使用的模型组,覆盖配置文件中的设置"
+    ),
 ) -> None:
     """与指定平台和模型聊天。"""
+    # 如果未提供平台或模型参数,则从config获取默认值
+    platform = platform or (
+        get_thinking_platform_name(llm_group)
+        if llm_type == "thinking"
+        else get_normal_platform_name(llm_group)
+    )
+    model = model or (
+        get_thinking_model_name(llm_group)
+        if llm_type == "thinking"
+        else get_normal_model_name(llm_group)
+    )
+
     if not validate_platform_model(platform, model):
         return
-    chat_with_model(platform, model, "")  # type: ignore
+    chat_with_model(platform, model, "", llm_type)
 
 
 @app.command("service")
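When --platform or --model is omitted, the hunk above fills them from configuration, picking the "thinking" or "normal" lookup according to --llm_type. The same fallback, condensed into one helper as a sketch (the function name is hypothetical; the get_* helpers are the ones imported from jarvis.jarvis_utils.config earlier in this diff):

from typing import Optional, Tuple

from jarvis.jarvis_utils.config import (
    get_normal_model_name,
    get_normal_platform_name,
    get_thinking_model_name,
    get_thinking_platform_name,
)

def resolve_platform_and_model(
    platform: Optional[str],
    model: Optional[str],
    llm_type: str = "normal",
    llm_group: Optional[str] = None,
) -> Tuple[str, str]:
    """Fill missing platform/model from config, mirroring chat_command above."""
    thinking = llm_type == "thinking"
    platform = platform or (
        get_thinking_platform_name(llm_group) if thinking else get_normal_platform_name(llm_group)
    )
    model = model or (
        get_thinking_model_name(llm_group) if thinking else get_normal_model_name(llm_group)
    )
    return platform, model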
@@ -351,6 +394,9 @@ def service_command(
     ),
 ) -> None:
     """启动OpenAI兼容的API服务。"""
+    # 如果未提供平台或模型参数,则从config获取默认值
+    platform = platform or get_normal_platform_name()
+    model = model or get_normal_model_name()
     start_service(host=host, port=port, default_platform=platform, default_model=model)
 
 
@@ -392,6 +438,15 @@ def role_command(
     model: Optional[str] = typer.Option(
         None, "--model", "-m", help="指定要使用的模型,覆盖角色配置"
     ),
+    llm_type: Optional[str] = typer.Option(
+        None,
+        "-t",
+        "--llm_type",
+        help="使用的LLM类型,可选值:'normal'(普通)或 'thinking'(思考模式),覆盖角色配置",
+    ),
+    llm_group: Optional[str] = typer.Option(
+        None, "-g", "--llm_group", help="使用的模型组,覆盖配置文件中的设置"
+    ),
 ) -> None:
     """加载角色配置文件并开始对话。"""
     config_path = os.path.expanduser(config_file)
@@ -418,14 +473,54 @@ def role_command(
         PrettyOutput.print("无效的选择", OutputType.ERROR)
         return
 
+    # 获取llm_type,优先使用命令行参数,否则使用角色配置,默认为normal
+    role_llm_type = llm_type or selected_role.get("llm_type", "normal")
+
     # 初始化平台和模型
-    platform_name = platform or selected_role["platform"]
-    model_name = model or selected_role["model"]
+    # 如果提供了platform或model参数,优先使用命令行参数
+    # 否则,如果提供了llm_group,根据llm_type从配置中获取
+    # 最后才使用角色配置中的platform和model
+    if platform:
+        platform_name = platform
+    elif llm_group:
+        platform_name = (
+            get_thinking_platform_name(llm_group)
+            if role_llm_type == "thinking"
+            else get_normal_platform_name(llm_group)
+        )
+    else:
+        platform_name = selected_role.get("platform")
+        if not platform_name:
+            # 如果角色配置中没有platform,使用默认配置
+            platform_name = (
+                get_thinking_platform_name()
+                if role_llm_type == "thinking"
+                else get_normal_platform_name()
+            )
+
+    if model:
+        model_name = model
+    elif llm_group:
+        model_name = (
+            get_thinking_model_name(llm_group)
+            if role_llm_type == "thinking"
+            else get_normal_model_name(llm_group)
+        )
+    else:
+        model_name = selected_role.get("model")
+        if not model_name:
+            # 如果角色配置中没有model,使用默认配置
+            model_name = (
+                get_thinking_model_name()
+                if role_llm_type == "thinking"
+                else get_normal_model_name()
+            )
+
     system_prompt = selected_role.get("system_prompt", "")
 
     # 开始对话
     PrettyOutput.print(f"已选择角色: {selected_role['name']}", OutputType.SUCCESS)
-    chat_with_model(platform_name, model_name, system_prompt)
+    chat_with_model(platform_name, model_name, system_prompt, role_llm_type)
 
 
 def main() -> None:
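The branching above encodes a precedence order for the role command: an explicit CLI flag wins, then a --llm_group lookup in the config, then the role file's own entry, and finally the default config lookup. A compact restatement of that order for the platform side, as a sketch (hypothetical helper, same config functions as above):

from typing import Any, Dict, Optional

from jarvis.jarvis_utils.config import (
    get_normal_platform_name,
    get_thinking_platform_name,
)

def pick_platform(
    cli_platform: Optional[str],
    llm_group: Optional[str],
    role: Dict[str, Any],
    llm_type: str,
) -> str:
    """CLI flag > llm_group config > role config > default config."""
    lookup = get_thinking_platform_name if llm_type == "thinking" else get_normal_platform_name
    if cli_platform:
        return cli_platform
    if llm_group:
        return lookup(llm_group)
    return role.get("platform") or lookup()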
@@ -12,6 +12,7 @@ from typing import Dict, List, Optional, Any
 from collections import defaultdict
 import sys
 import time
+import uuid
 
 
 class StatsStorage:
@@ -76,7 +77,9 @@ class StatsStorage:
     def _save_json(self, filepath: Path, data: Dict):
         """保存JSON文件"""
         # 使用临时文件+重命名的原子操作来避免并发写入问题
-        temp_filepath = filepath.with_suffix(".tmp")
+        # 使用唯一的临时文件名避免并发冲突
+        temp_suffix = f".tmp.{uuid.uuid4().hex[:8]}"
+        temp_filepath = filepath.with_suffix(temp_suffix)
         max_retries = 3
 
         for attempt in range(max_retries):
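The unique temp suffix above matters because several processes may write the same stats file at once; with a fixed ".tmp" name they could clobber each other's half-written temp file before the rename. A minimal sketch of the write-then-replace core (the real _save_json adds a retry loop; atomic_write_json is an illustrative name):

import json
import os
import uuid
from pathlib import Path
from typing import Any, Dict

def atomic_write_json(filepath: Path, data: Dict[str, Any]) -> None:
    # Unique per-writer temp name, as in the hunk above.
    temp_filepath = filepath.with_suffix(f".tmp.{uuid.uuid4().hex[:8]}")
    with open(temp_filepath, "w", encoding="utf-8") as f:
        json.dump(data, f, ensure_ascii=False)
    # os.replace atomically swaps the file into place on the same filesystem.
    os.replace(temp_filepath, filepath)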
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: jarvis-ai-assistant
-Version: 0.3.1
+Version: 0.3.2
 Summary: Jarvis: An AI assistant that uses tools to interact with the system
 Home-page: https://github.com/skyfireitdiy/Jarvis
 Author: skyfire
@@ -1,12 +1,12 @@
-jarvis/__init__.py,sha256=XcoTQZ6UV9jwJibx5WyZw3mcC4mymdL1YQyJuL5GUfY,73
+jarvis/__init__.py,sha256=lEwB7boP3teN71H_7lteEF13tfqihaQwtXiRqbFAWro,73
 jarvis/jarvis_agent/__init__.py,sha256=2zScM4N6GN4Vv5y_jDmJjTirea8ZGtRMiPf-UaGl0XE,25789
 jarvis/jarvis_agent/agent_manager.py,sha256=YzpMiF0H2-eyk2kn2o24Bkj3bXsQx7Pv2vfD4gWepo0,2893
 jarvis/jarvis_agent/builtin_input_handler.py,sha256=Qs4LAr4xdKLBJpQE81YP4CkucAop86ms0iVoKa1nnso,2468
 jarvis/jarvis_agent/config_editor.py,sha256=Ctk82sO6w2cNW0-_5L7Bomj-hgM4U7WwMc52fwhAJyg,1809
 jarvis/jarvis_agent/edit_file_handler.py,sha256=w-byNJ4TN_SlV3djjfFC7OksySOFGrM8ku49w662dzc,11854
 jarvis/jarvis_agent/file_methodology_manager.py,sha256=h2ogMK9mSKjg_n04ITw24m28J_U225bhLNhfwpf9jpU,4383
-jarvis/jarvis_agent/jarvis.py,sha256=yCXqNVF9S-IX02tNfPTw4WesMuO3F0Ht3Z0u1Srv7VI,3234
-jarvis/jarvis_agent/main.py,sha256=56pLVy6v-3ZdyPCcWXdRkgbjmYsoIfC7zrA6B7sYivU,3334
+jarvis/jarvis_agent/jarvis.py,sha256=2XMuMA3A4ihE4RAo-XnYZHnJ0ZrGRFagE1s-eiGdZ9Q,3252
+jarvis/jarvis_agent/main.py,sha256=Sd4-OnBcMqY5i7vb-Riy_JT2fGfuANtgiAWvWFY8LXM,3345
 jarvis/jarvis_agent/memory_manager.py,sha256=F7HTNzdN1_-cSygnz7zKSJRJvPLUOosqcXQeiW8zG4U,5266
 jarvis/jarvis_agent/methodology_share_manager.py,sha256=vwWNexluTXSI3qeNP3zJAemOjWW37o_1AlqDR1C8wCI,6910
 jarvis/jarvis_agent/output_handler.py,sha256=P7oWpXBGFfOsWq7cIhS_z9crkQ19ES7qU5pM92KKjAs,1172
@@ -14,14 +14,14 @@ jarvis/jarvis_agent/prompt_builder.py,sha256=PH1fPDVa8z_RXkoXHJFNDf8PQjUoLNLYwkh
 jarvis/jarvis_agent/prompts.py,sha256=X6cXa-n0xqBQ8LDTgLsD0kqziAh1s0cNp89i4mxcvHg,9444
 jarvis/jarvis_agent/protocols.py,sha256=JWnJDikFEuwvFUv7uzXu0ggJ4O9K2FkMnfVCwIJ5REw,873
 jarvis/jarvis_agent/session_manager.py,sha256=DnvI9rWkVmkyO1XfKZyo9lTn4ajg4ccwzEkoRHFPOJM,2925
-jarvis/jarvis_agent/share_manager.py,sha256=3lXwoHv6TZb6nxkSN0r2BHLMk6J09HC0csT3C9Ep5w0,6615
+jarvis/jarvis_agent/share_manager.py,sha256=7w25cX2zupnBDdn_HDusMyOAXbHQMWKYVZYq9i4EJs0,7619
 jarvis/jarvis_agent/shell_input_handler.py,sha256=1IboqdxcJuoIqRpmDU10GugR9fWXUHyCEbVF4nIWbyo,1328
 jarvis/jarvis_agent/task_analyzer.py,sha256=-fQ9YBYFcc-Z1FSoDIPzRfAgkREFoIOXtU2TdBkB-e0,4656
 jarvis/jarvis_agent/task_manager.py,sha256=HJm4_SMpsFbQMUUsAZeHm7cZuhNbz28YW-DRLYgoarc,4422
 jarvis/jarvis_agent/tool_executor.py,sha256=nIq-sPNgrtimtM-IHpN09cWmId8jDzWRdCFoRzXnnoo,1721
 jarvis/jarvis_agent/tool_share_manager.py,sha256=R5ONIQlDXX9pFq3clwHFhEW8BAJ3ECaR2DqWCEC9tzM,5205
 jarvis/jarvis_code_agent/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-jarvis/jarvis_code_agent/code_agent.py,sha256=cFLvSnUMQoQ_RkQ4MsZaln1G6DrxBF6u4Lgz79q2bSI,29305
+jarvis/jarvis_code_agent/code_agent.py,sha256=mFcvj3F2ZJS550Cbv8TrVyY1evQVow8SI6B-LCdRfxI,29317
 jarvis/jarvis_code_agent/lint.py,sha256=LZPsfyZPMo7Wm7LN4osZocuNJwZx1ojacO3MlF870x8,4009
 jarvis/jarvis_code_analysis/code_review.py,sha256=TMov1pqDe1bg0vM1ndnYeW9ejHrRN_jMroo3T4L9yag,32368
 jarvis/jarvis_code_analysis/checklists/__init__.py,sha256=LIXAYa1sW3l7foP6kohLWnE98I_EQ0T7z5bYKHq6rJA,78
@@ -66,7 +66,7 @@ jarvis/jarvis_platform/registry.py,sha256=1bMy0YZUa8NLzuZlKfC4CBtpa0iniypTxUZk0H
 jarvis/jarvis_platform/tongyi.py,sha256=uZP5ceCbHPApimKBqKthP5QynG52C3tMBglIyoBHwaY,22186
 jarvis/jarvis_platform/yuanbao.py,sha256=mS4aywK9CzgFU6FHh6GUxyY1Ly-NoBtGkBi74Jo_0XM,22921
 jarvis/jarvis_platform_manager/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-jarvis/jarvis_platform_manager/main.py,sha256=VhP0qiLqGPk_nQ_j0xg6H5CSct5ckNBaFqohb82whYs,17490
+jarvis/jarvis_platform_manager/main.py,sha256=fRZbtCH6zrG7NXTMlplf0Oq4gRYkhebvPm3RrQtN_WU,20988
 jarvis/jarvis_platform_manager/service.py,sha256=myJYGSUclCEiRTf3JKs4JndwhXJeQj7MQQy4i13jMt0,13767
 jarvis/jarvis_rag/__init__.py,sha256=HRTXgnQxDuaE9x-e3r6SYqhJ5d4DSI_rrIxy2IGY6qk,320
 jarvis/jarvis_rag/cache.py,sha256=Tqx_Oe-AhuWlMXHGHUaIuG6OEHoHBVZq7mL3kldtFFU,2723
@@ -82,7 +82,7 @@ jarvis/jarvis_smart_shell/main.py,sha256=ReCC9bWPlgl84ylI0uvdzlE3J6fS0XzFSLOpQQy
 jarvis/jarvis_stats/__init__.py,sha256=jJzgP43nxzLbNGs8Do4Jfta1PNCJMf1Oq9YTPd6EnFM,342
 jarvis/jarvis_stats/cli.py,sha256=KqLH-9Kd_YlBJSke3QXY90XnFmiH2kYkRacL8ygtSsM,12649
 jarvis/jarvis_stats/stats.py,sha256=qLyOJvWAv0fgV7oohAUSQ2W2E1Hr4wWgEQXDOiI-4Cg,17674
-jarvis/jarvis_stats/storage.py,sha256=0hs-TkmvWavsf6J2LLOLXyyZzVK8g77jecRnt89MzYE,12724
+jarvis/jarvis_stats/storage.py,sha256=MBQRxExIWdePXzY1EE8JAs1IEpMqamImpgjruqt_u9A,12853
 jarvis/jarvis_stats/visualizer.py,sha256=ZIBmGELzs6c7qM01tQql1HF6eFKn6HDGVQfKXRUUIY0,8529
 jarvis/jarvis_tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 jarvis/jarvis_tools/ask_user.py,sha256=M6DdLNryCE8y1JcdZHEifUgZkPUEPNKc-zDW5p0Mb1k,2029
@@ -117,9 +117,9 @@ jarvis/jarvis_utils/methodology.py,sha256=i8-chZtggN3GbhaDzeLV4eBl0DP3I5zctZ-I5H
 jarvis/jarvis_utils/output.py,sha256=QRLlKObQKT0KuRSeZRqYb7NlTQvsd1oZXZ41WxeWEuU,10894
 jarvis/jarvis_utils/tag.py,sha256=f211opbbbTcSyzCDwuIK_oCnKhXPNK-RknYyGzY1yD0,431
 jarvis/jarvis_utils/utils.py,sha256=RwhyPfBGx_x9OKawWUVw6bAbZeI4IlvxBhhYQ_RHpWQ,40847
-jarvis_ai_assistant-0.3.1.dist-info/licenses/LICENSE,sha256=AGgVgQmTqFvaztRtCAXsAMryUymB18gZif7_l2e1XOg,1063
-jarvis_ai_assistant-0.3.1.dist-info/METADATA,sha256=4A6JaiqkYbe-sqjWL5vB8MPelkKGP03DulZn-Y-zE3Y,16807
-jarvis_ai_assistant-0.3.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-jarvis_ai_assistant-0.3.1.dist-info/entry_points.txt,sha256=8cwi1VxZGU5UeSZMFiH-jG6NK95Asjukj5SBLBrGiGo,1257
-jarvis_ai_assistant-0.3.1.dist-info/top_level.txt,sha256=1BOxyWfzOP_ZXj8rVTDnNCJ92bBGB0rwq8N1PCpoMIs,7
-jarvis_ai_assistant-0.3.1.dist-info/RECORD,,
+jarvis_ai_assistant-0.3.2.dist-info/licenses/LICENSE,sha256=AGgVgQmTqFvaztRtCAXsAMryUymB18gZif7_l2e1XOg,1063
+jarvis_ai_assistant-0.3.2.dist-info/METADATA,sha256=DL481NElSBIOwMBr1GXz6tfFmzR1vE86Z0keZubnZ_E,16807
+jarvis_ai_assistant-0.3.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+jarvis_ai_assistant-0.3.2.dist-info/entry_points.txt,sha256=8cwi1VxZGU5UeSZMFiH-jG6NK95Asjukj5SBLBrGiGo,1257
+jarvis_ai_assistant-0.3.2.dist-info/top_level.txt,sha256=1BOxyWfzOP_ZXj8rVTDnNCJ92bBGB0rwq8N1PCpoMIs,7
+jarvis_ai_assistant-0.3.2.dist-info/RECORD,,