zrb 1.3.1__py3-none-any.whl → 1.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
zrb/__init__.py CHANGED
@@ -33,6 +33,7 @@ from zrb.input.option_input import OptionInput
from zrb.input.password_input import PasswordInput
from zrb.input.str_input import StrInput
from zrb.input.text_input import TextInput
+ from zrb.llm_config import llm_config
from zrb.runner.cli import cli
from zrb.runner.web_config.config_factory import web_config
from zrb.runner.web_schema.user import User
@@ -101,6 +102,7 @@ assert ContentTransformer
assert Scaffolder
assert Scheduler
assert cli
+ assert llm_config
assert Xcom
assert web_config
assert User
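
With `llm_config` now re-exported from the package root, project scripts can reach the shared LLM configuration without a deep import. A minimal sketch (assuming zrb 1.4.0 is installed; the model name is only illustrative):

```python
# Re-exported by zrb/__init__.py above; equivalent to importing zrb.llm_config.llm_config.
from zrb import llm_config

# set_default_model accepts a pydantic_ai Model or a plain string (see zrb/llm_config.py below).
llm_config.set_default_model("gpt-4o-mini")
```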
zrb/builtin/llm/llm_chat.py CHANGED
@@ -2,14 +2,12 @@ import json
import os
from typing import Any

- from pydantic_ai.models import Model
-
from zrb.builtin.group import llm_group
from zrb.builtin.llm.tool.api import get_current_location, get_current_weather
from zrb.builtin.llm.tool.cli import run_shell_command
from zrb.builtin.llm.tool.file import (
-     list_file,
-     read_source_code,
+     list_files,
+     read_all_files,
    read_text_file,
    write_text_file,
)
@@ -24,18 +22,14 @@ from zrb.config (
    LLM_ALLOW_ACCESS_LOCAL_FILE,
    LLM_ALLOW_ACCESS_SHELL,
    LLM_HISTORY_DIR,
-     LLM_MODEL,
    LLM_SYSTEM_PROMPT,
    SERP_API_KEY,
)
- from zrb.context.any_context import AnyContext
from zrb.context.any_shared_context import AnySharedContext
- from zrb.input.any_input import AnyInput
from zrb.input.bool_input import BoolInput
from zrb.input.str_input import StrInput
from zrb.input.text_input import TextInput
from zrb.task.llm_task import LLMTask
- from zrb.util.attr import get_attr
from zrb.util.file import read_file, write_file
from zrb.util.string.conversion import to_pascal_case

@@ -90,46 +84,37 @@ def _write_chat_conversation(
    write_file(last_session_file_path, current_session_name)


- class _LLMChat(LLMTask):
-
-     _default_model: Model | str | None = None
-
-     def set_default_model(self, model: Model | str):
-         self._default_model = model
-
-     @property
-     def inputs(self) -> list[AnyInput]:
-         task_inputs = super().inputs
-         model_input_default = LLM_MODEL if self._default_model is None else "default"
-         return [
+ llm_chat: LLMTask = llm_group.add_task(
+     LLMTask(
+         name="llm-chat",
+         input=[
            StrInput(
                "model",
                description="LLM Model",
                prompt="LLM Model",
-                 default=model_input_default,
+                 default="",
                allow_positional_parsing=False,
                always_prompt=False,
+                 allow_empty=True,
+             ),
+             StrInput(
+                 "base-url",
+                 description="LLM API Base URL",
+                 prompt="LLM API Base URL",
+                 default="",
+                 allow_positional_parsing=False,
+                 always_prompt=False,
+                 allow_empty=True,
+             ),
+             StrInput(
+                 "api-key",
+                 description="LLM API Key",
+                 prompt="LLM API Key",
+                 default="",
+                 allow_positional_parsing=False,
+                 always_prompt=False,
+                 allow_empty=True,
            ),
-             *task_inputs,
-         ]
-
-     def _get_model(self, ctx: AnyContext) -> str | Model | None:
-         if ctx.input.model == "default":
-             if self._default_model is not None:
-                 return self._default_model
-             return super()._get_model(ctx)
-         model = get_attr(
-             ctx, ctx.input.model, "ollama_chat/llama3.1", auto_render=self._render_model
-         )
-         if isinstance(model, (Model, str)) or model is None:
-             return model
-         raise ValueError("Invalid model")
-
-
- llm_chat: LLMTask = llm_group.add_task(
-     _LLMChat(
-         name="llm-chat",
-         input=[
            TextInput(
                "system-prompt",
                description="System prompt",
@@ -156,10 +141,17 @@ llm_chat: LLMTask = llm_group.add_task(
                always_prompt=False,
            ),
        ],
+         model=lambda ctx: None if ctx.input.model == "" else ctx.input.model,
+         model_base_url=lambda ctx: (
+             None if ctx.input.base_url == "" else ctx.input.base_url
+         ),
+         model_api_key=lambda ctx: (
+             None if ctx.input.api_key == "" else ctx.input.api_key
+         ),
        conversation_history_reader=_read_chat_conversation,
        conversation_history_writer=_write_chat_conversation,
        description="Chat with LLM",
-         system_prompt="{ctx.input['system-prompt']}",
+         system_prompt="{ctx.input.system_prompt}",
        message="{ctx.input.message}",
        retries=0,
    ),
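
In the rewritten `llm-chat` task, the three new string inputs default to empty strings, and the lambdas above translate an empty value into `None`, which makes `LLMTask` fall back to the shared `llm_config` defaults. A small standalone sketch of that rule (the helper below is hypothetical, not part of the package):

```python
# Hypothetical helper mirroring the model / base-url / api-key lambdas above:
# an empty CLI input means "no override", so the shared llm_config defaults apply.
def none_if_empty(value: str) -> str | None:
    return None if value == "" else value

assert none_if_empty("") is None            # fall back to llm_config
assert none_if_empty("gpt-4o") == "gpt-4o"  # explicit per-invocation override
```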
@@ -168,8 +160,8 @@ llm_chat: LLMTask = llm_group.add_task(


if LLM_ALLOW_ACCESS_LOCAL_FILE:
-     llm_chat.add_tool(read_source_code)
-     llm_chat.add_tool(list_file)
+     llm_chat.add_tool(read_all_files)
+     llm_chat.add_tool(list_files)
    llm_chat.add_tool(read_text_file)
    llm_chat.add_tool(write_text_file)

zrb/builtin/llm/tool/file.py CHANGED
@@ -1,19 +1,44 @@
+ import fnmatch
import os

from zrb.util.file import read_file, write_file


- def list_file(
+ def list_files(
    directory: str = ".",
-     extensions: list[str] = [".py", ".go", ".js", ".ts", ".java", ".c", ".cpp"],
+     included_patterns: list[str] = [
+         "*.py",
+         "*.go",
+         "*.js",
+         "*.ts",
+         "*.java",
+         "*.c",
+         "*.cpp",
+     ],
+     excluded_patterns: list[str] = [
+         "venv",
+         ".venv",
+         "node_modules",
+         ".git",
+         "__pycache__",
+     ],
) -> list[str]:
-     """List all files in a directory"""
+     """List all files in a directory that match any of the included glob patterns
+     and do not reside in any directory matching an excluded pattern.
+     Patterns are evaluated using glob-style matching.
+     """
    all_files: list[str] = []
-     for root, _, files in os.walk(directory):
+     for root, dirs, files in os.walk(directory):
        for filename in files:
-             for extension in extensions:
-                 if filename.lower().endswith(extension):
-                     all_files.append(os.path.join(root, filename))
+             if any(fnmatch.fnmatch(filename, pat) for pat in included_patterns):
+                 full_path = os.path.join(root, filename)
+                 # Check each component of the full path for excluded patterns.
+                 if any(
+                     any(fnmatch.fnmatch(part, pat) for pat in excluded_patterns)
+                     for part in os.path.normpath(full_path).split(os.sep)
+                 ):
+                     continue
+                 all_files.append(full_path)
    return all_files


@@ -27,12 +52,24 @@ def write_text_file(file: str, content: str):
    return write_file(os.path.abspath(file), content)


- def read_source_code(
+ def read_all_files(
    directory: str = ".",
-     extensions: list[str] = [".py", ".go", ".js", ".ts", ".java", ".c", ".cpp"],
+     included_patterns: list[str] = [
+         "*.py",
+         "*.go",
+         "*.js",
+         "*.ts",
+         "*.java",
+         "*.c",
+         "*.cpp",
+     ],
+     excluded_patterns: list[str] = [],
) -> list[str]:
-     """Read source code in a directory"""
-     files = list_file(directory, extensions)
+     """Read all files in a directory that match any of the included glob patterns
+     and do not match any of the excluded glob patterns.
+     Patterns are evaluated using glob-style matching.
+     """
+     files = list_files(directory, included_patterns, excluded_patterns)
    for index, file in enumerate(files):
        content = read_text_file(file)
        files[index] = f"# {file}\n```\n{content}\n```"
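
The renamed helpers filter by glob patterns instead of bare file extensions, and `list_files` now also skips excluded directories anywhere in the path. A hedged usage sketch (the directory and patterns are illustrative):

```python
from zrb.builtin.llm.tool.file import list_files, read_all_files

# Collect TypeScript sources while skipping dependency folders (example patterns).
sources = list_files(
    directory="./src",
    included_patterns=["*.ts"],
    excluded_patterns=["node_modules", ".git"],
)

# Read every matching file; each entry is rendered as "# <path>" followed by a fenced body.
snippets = read_all_files(directory="./src", included_patterns=["*.ts"])
```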
zrb/config.py CHANGED
@@ -75,7 +75,6 @@ WEB_AUTH_ACCESS_TOKEN_EXPIRE_MINUTES = int(
WEB_AUTH_REFRESH_TOKEN_EXPIRE_MINUTES = int(
    os.getenv("ZRB_WEB_REFRESH_TOKEN_EXPIRE_MINUTES", "60")
)
- LLM_MODEL = os.getenv("ZRB_LLM_MODEL", "ollama_chat/llama3.1")

_DEFAULT_PROMPT = (
    "You are a helpful AI assistant capable of using various tools to answer user queries. When solving a problem:\n"
zrb/llm_config.py ADDED
@@ -0,0 +1,50 @@
+ import os
+
+ from pydantic_ai.models import Model
+ from pydantic_ai.models.openai import OpenAIModel
+ from pydantic_ai.providers.openai import OpenAIProvider
+
+
+ class LLMConfig:
+
+     def __init__(
+         self,
+         model_name: str | None = None,
+         base_url: str | None = None,
+         api_key: str | None = None,
+     ):
+         self._model_name = (
+             model_name if model_name is not None else os.getenv("ZRB_LLM_MODEL", None)
+         )
+         self._base_url = (
+             base_url if base_url is not None else os.getenv("ZRB_LLM_BASE_URL", None)
+         )
+         self._api_key = (
+             api_key if api_key is not None else os.getenv("ZRB_LLM_API_KEY", None)
+         )
+         self._default_model = None
+
+     def _get_model_name(self) -> str | None:
+         return self._model_name if self._model_name is not None else None
+
+     def _get_model_provider(self) -> OpenAIProvider:
+         if self._base_url is None and self._api_key is None:
+             return "openai"
+         return OpenAIProvider(base_url=self._base_url, api_key=self._api_key)
+
+     def get_default_model(self) -> Model | str | None:
+         if self._default_model is not None:
+             return self._default_model
+         model_name = self._get_model_name()
+         if model_name is None:
+             return None
+         return OpenAIModel(
+             model_name=model_name,
+             provider=self._get_model_provider(),
+         )
+
+     def set_default_model(self, model: Model | str | None):
+         self._default_model = model
+
+
+ llm_config = LLMConfig()
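
The new module instantiates a process-wide `llm_config` singleton at import time, reading `ZRB_LLM_MODEL`, `ZRB_LLM_BASE_URL`, and `ZRB_LLM_API_KEY` from the environment. It can also be configured explicitly with a `pydantic_ai` model object, as the updated README example further down does; a sketch assuming an OpenRouter key is available:

```python
import os

from pydantic_ai.models.openai import OpenAIModel
from pydantic_ai.providers.openai import OpenAIProvider
from zrb import llm_config

# Explicit default model for every LLMTask that does not override `model` itself.
# The endpoint and model name below are illustrative, taken from the README example.
llm_config.set_default_model(
    OpenAIModel(
        model_name="gpt-4o",
        provider=OpenAIProvider(
            base_url="https://openrouter.ai/api/v1",
            api_key=os.getenv("OPENROUTER_API_KEY", ""),
        ),
    )
)
```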
zrb/task/llm_task.py CHANGED
@@ -18,11 +18,13 @@ from pydantic_ai.models import Model
from pydantic_ai.settings import ModelSettings

from zrb.attr.type import StrAttr, fstring
- from zrb.config import LLM_MODEL, LLM_SYSTEM_PROMPT
+ from zrb.config import LLM_SYSTEM_PROMPT
from zrb.context.any_context import AnyContext
from zrb.context.any_shared_context import AnySharedContext
from zrb.env.any_env import AnyEnv
from zrb.input.any_input import AnyInput
+ from zrb.llm_config import LLMConfig
+ from zrb.llm_config import llm_config as default_llm_config
from zrb.task.any_task import AnyTask
from zrb.task.base_task import BaseTask
from zrb.util.attr import get_attr, get_str_attr
@@ -46,11 +48,15 @@ class LLMTask(BaseTask):
        env: list[AnyEnv | None] | AnyEnv | None = None,
        model: (
            Callable[[AnySharedContext], Model | str | fstring] | Model | None
-         ) = LLM_MODEL,
+         ) = None,
+         render_model: bool = True,
+         model_base_url: StrAttr = None,
+         render_model_base_url: bool = True,
+         model_api_key: StrAttr = None,
+         render_model_api_key: bool = True,
        model_settings: (
            ModelSettings | Callable[[AnySharedContext], ModelSettings] | None
        ) = None,
-         render_model: bool = True,
        agent: Agent | Callable[[AnySharedContext], Agent] | None = None,
        system_prompt: StrAttr | None = LLM_SYSTEM_PROMPT,
        render_system_prompt: bool = True,
@@ -105,9 +111,13 @@ class LLMTask(BaseTask):
            successor=successor,
        )
        self._model = model
+         self._render_model = render_model
+         self._model_base_url = model_base_url
+         self._render_model_base_url = render_model_base_url
+         self._model_api_key = model_api_key
+         self._render_model_api_key = render_model_api_key
        self._model_settings = model_settings
        self._agent = agent
-         self._render_model = render_model
        self._system_prompt = system_prompt
        self._render_system_prompt = render_system_prompt
        self._message = message
@@ -120,9 +130,6 @@ class LLMTask(BaseTask):
        self._render_history_file = render_history_file
        self._max_call_iteration = max_call_iteration

-     def set_model(self, model: Model | str):
-         self._model = model
-
    def add_tool(self, tool: ToolOrCallable):
        self._additional_tools.append(tool)

@@ -242,15 +249,47 @@ class LLMTask(BaseTask):
            system_prompt=self._get_system_prompt(ctx),
            tools=tools,
            model_settings=self._get_model_settings(ctx),
+             retries=3,
        )

    def _get_model(self, ctx: AnyContext) -> str | Model | None:
-         model = get_attr(
-             ctx, self._model, "ollama_chat/llama3.1", auto_render=self._render_model
+         model = get_attr(ctx, self._model, None, auto_render=self._render_model)
+         if model is None:
+             return default_llm_config.get_default_model()
+         if isinstance(model, str):
+             llm_config = LLMConfig(
+                 model_name=model,
+                 base_url=get_attr(
+                     ctx,
+                     self._get_model_base_url(ctx),
+                     None,
+                     auto_render=self._render_model_base_url,
+                 ),
+                 api_key=get_attr(
+                     ctx,
+                     self._get_model_api_key(ctx),
+                     None,
+                     auto_render=self._render_model_api_key,
+                 ),
+             )
+             return llm_config.get_default_model()
+         raise ValueError(f"Invalid model: {model}")
+
+     def _get_model_base_url(self, ctx: AnyContext) -> str | None:
+         base_url = get_attr(
+             ctx, self._model_base_url, None, auto_render=self._render_model_base_url
+         )
+         if isinstance(base_url, str) or base_url is None:
+             return base_url
+         raise ValueError(f"Invalid model base URL: {base_url}")
+
+     def _get_model_api_key(self, ctx: AnyContext) -> str | None:
+         api_key = get_attr(
+             ctx, self._model_api_key, None, auto_render=self._render_model_api_key
        )
-         if isinstance(model, (Model, str)) or model is None:
-             return model
-         raise ValueError("Invalid model")
+         if isinstance(api_key, str) or api_key is None:
+             return api_key
+         raise ValueError(f"Invalid model base URL: {api_key}")

    def _get_system_prompt(self, ctx: AnyContext) -> str:
        return get_str_attr(
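
With the constructor changes above, a model can now be given per task as a plain string together with `model_base_url` and `model_api_key`; `_get_model` then builds the `pydantic_ai` model through `LLMConfig`, and omitting all three falls back to the shared `llm_config`. A hedged sketch (the task name, endpoint, and key are placeholders):

```python
from zrb import LLMTask, cli

# Hypothetical task illustrating the new string-based model parameters.
llm_hello = cli.add_task(
    LLMTask(
        name="llm-hello",
        description="Say hello through an OpenAI-compatible endpoint",
        message="Say hello in one short sentence.",
        model="gpt-4o-mini",                            # placeholder model name
        model_base_url="https://openrouter.ai/api/v1",  # placeholder endpoint
        model_api_key="sk-example",                     # placeholder key
    )
)
```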
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: zrb
3
- Version: 1.3.1
3
+ Version: 1.4.0
4
4
  Summary: Your Automation Powerhouse
5
5
  Home-page: https://github.com/state-alchemists/zrb
6
6
  License: AGPL-3.0-or-later
@@ -24,7 +24,7 @@ Requires-Dist: isort (>=5.13.2,<5.14.0)
Requires-Dist: libcst (>=1.5.0,<2.0.0)
Requires-Dist: pdfplumber (>=0.11.4,<0.12.0) ; extra == "rag"
Requires-Dist: psutil (>=6.1.1,<7.0.0)
- Requires-Dist: pydantic-ai (>=0.0.31,<0.0.32)
+ Requires-Dist: pydantic-ai (>=0.0.42,<0.0.43)
Requires-Dist: python-dotenv (>=1.0.1,<2.0.0)
Requires-Dist: python-jose[cryptography] (>=3.4.0,<4.0.0)
Requires-Dist: requests (>=2.32.3,<3.0.0)
@@ -44,47 +44,49 @@ Zrb allows you to write your automation tasks in Python. For example, you can de

```python
import os
- from zrb import cli, LLMTask, CmdTask, StrInput
- from zrb.builtin.llm.tool.file import read_source_code, write_text_file
+ from zrb import cli, llm_config, LLMTask, CmdTask, StrInput, Group
+ from zrb.builtin.llm.tool.file import read_all_files, write_text_file
from pydantic_ai.models.openai import OpenAIModel
-
+ from pydantic_ai.providers.openai import OpenAIProvider

CURRENT_DIR = os.getcwd()
- OPENROUTER_BASE_URL = "https://openrouter.ai/api/v1"
- OPENROUTER_API_KEY = os.getenv("OPENROUTER_API_KEY", "")
- OPENROUTER_MODEL_NAME = os.getenv(
-     "AGENT_MODEL_NAME", "anthropic/claude-3.7-sonnet"
+
+ # Setup default LLM Config
+ llm_config.set_default_model(
+     OpenAIModel(
+         model_name="gpt-4o",
+         provider=OpenAIProvider(
+             base_url="https://openrouter.ai/api/v1",
+             api_key=os.getenv("OPENROUTER_API_KEY", "")
+         )
+     )
)

+ # Make UML group
+ uml_group = cli.add_group(Group(name="uml", description="UML related tasks"))

- # Defining a LLM Task to create a Plantuml script based on source code in current directory.
- # User can choose the diagram type. By default it is "state diagram"
- make_uml = cli.add_task(
+ # Generate UML script
+ make_uml_script = uml_group.add_task(
    LLMTask(
-         name="make-uml",
+         name="make-script",
        description="Creating plantuml diagram based on source code in current directory",
        input=StrInput(name="diagram", default="state diagram"),
-         model=OpenAIModel(
-             OPENROUTER_MODEL_NAME,
-             base_url=OPENROUTER_BASE_URL,
-             api_key=OPENROUTER_API_KEY,
-         ),
        message=(
            f"Read source code in {CURRENT_DIR}, "
            "make a {ctx.input.diagram} in plantuml format. "
            f"Write the script into {CURRENT_DIR}/{{ctx.input.diagram}}.uml"
        ),
        tools=[
-             read_source_code,
+             read_all_files,
            write_text_file,
        ],
    )
)

# Defining a Cmd Task to transform Plantuml script into a png image.
- make_png = cli.add_task(
+ make_uml_image = uml_group.add_task(
    CmdTask(
-         name="make-png",
+         name="make-image",
        description="Creating png based on source code in current directory",
        input=StrInput(name="diagram", default="state diagram"),
        cmd="plantuml -tpng '{ctx.input.diagram}.uml'",
@@ -93,19 +95,19 @@ make_png = cli.add_task(
)

# Making sure that make_png has make_uml as its dependency.
- make_uml >> make_png
+ make_uml_script >> make_uml_image
```

Once defined, your automation tasks are immediately accessible from the CLI. You can then invoke the tasks by invoking.

```bash
- zrb make-png --diagram "state diagram"
+ zrb uml make-image --diagram "state diagram"
```

Or you can invoke the tasks without parameter.

```bash
- zrb make-png
+ zrb uml make-image
```

At this point, Zrb will politely ask you to provide the diagram type.
zrb-1.3.1.dist-info/RECORD → zrb-1.4.0.dist-info/RECORD
@@ -1,4 +1,4 @@
- zrb/__init__.py,sha256=JYLyBeSv-FP2iVKgsXJH8Ae-Cmjp5nmmIiwqayhCOEE,2964
+ zrb/__init__.py,sha256=1waPjZcA3IHUEvIuVQso0YfNfW9i7SCJgEfzhiNTaCk,3020
zrb/__main__.py,sha256=QcMnHfAFbDUFw9p9tgfFS4U0Ra9nE-TLU5YoMBiAriE,808
zrb/attr/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
zrb/attr/type.py,sha256=4TV5gPYMMrKh5V-yB6iRYKCbsXAH_AvGXMsjxKLHcUs,568
@@ -7,11 +7,11 @@ zrb/builtin/base64.py,sha256=1YnSwASp7OEAvQcsnHZGpJEvYoI1Z2zTIJ1bCDHfcPQ,921
zrb/builtin/git.py,sha256=8_qVE_2lVQEVXQ9vhiw8Tn4Prj1VZB78ZjEJJS5Ab3M,5461
zrb/builtin/git_subtree.py,sha256=7BKwOkVTWDrR0DXXQ4iJyHqeR6sV5VYRt8y_rEB0EHg,3505
zrb/builtin/group.py,sha256=-phJfVpTX3_gUwS1u8-RbZUHe-X41kxDBSmrVh4rq8E,1682
- zrb/builtin/llm/llm_chat.py,sha256=yiL16XRy4oh5jKMzH0W7RBxfEwGKflvvrsYKleJFKWc,6221
+ zrb/builtin/llm/llm_chat.py,sha256=QCfxocM7UQPtpIWLMzr9wKbl9DCPcDZszAnPxszaww0,6071
zrb/builtin/llm/previous-session.js,sha256=xMKZvJoAbrwiyHS0OoPrWuaKxWYLoyR5sguePIoCjTY,816
zrb/builtin/llm/tool/api.py,sha256=bXFE7jihdhUscxJH8lu5imwlYH735AalbCyUTl28BaQ,826
zrb/builtin/llm/tool/cli.py,sha256=to_IjkfrMGs6eLfG0cpVN9oyADWYsJQCtyluUhUdBww,253
- zrb/builtin/llm/tool/file.py,sha256=ibvh0zrsnponwyZvw6bWMUbpwSv5S5WUWCDfQ6BjVwk,1160
+ zrb/builtin/llm/tool/file.py,sha256=CVmAwzHrO6gk6OcnHedKiZDQhmi-0f6Tx0vJWQe1KOQ,2191
zrb/builtin/llm/tool/rag.py,sha256=vEIThEy0JGwXEiNRLOEJAHAE0l1Qie2qvU3ryioeYMk,6066
zrb/builtin/llm/tool/web.py,sha256=SDnCtYHZ0Q4DtLbIhc11a0UyyKbTTeW60UfeIKzK35k,3204
zrb/builtin/md5.py,sha256=0pNlrfZA0wlZlHvFHLgyqN0JZJWGKQIF5oXxO44_OJk,949
@@ -208,7 +208,7 @@ zrb/callback/callback.py,sha256=hKefB_Jd1XGjPSLQdMKDsGLHPzEGO2dqrIArLl_EmD0,848
zrb/cmd/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
zrb/cmd/cmd_result.py,sha256=L8bQJzWCpcYexIxHBNsXj2pT3BtLmWex0iJSMkvimOA,597
zrb/cmd/cmd_val.py,sha256=7Doowyg6BK3ISSGBLt-PmlhzaEkBjWWm51cED6fAUOQ,1014
- zrb/config.py,sha256=MfHwcQ4OhCmCw6jXpFI8483Ase6YrqNGBvqYzwnwopw,4753
+ zrb/config.py,sha256=X0mlhmpUrYp_l4qI3CnsqOAfvxfLkteCOV9ABGF--Qc,4690
zrb/content_transformer/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
zrb/content_transformer/any_content_transformer.py,sha256=v8ZUbcix1GGeDQwB6OKX_1TjpY__ksxWVeqibwa_iZA,850
zrb/content_transformer/content_transformer.py,sha256=STl77wW-I69QaGzCXjvkppngYFLufow8ybPLSyAvlHs,2404
@@ -237,6 +237,7 @@ zrb/input/option_input.py,sha256=TQB82ko5odgzkULEizBZi0e9TIHEbIgvdP0AR3RhA74,213
zrb/input/password_input.py,sha256=szBojWxSP9QJecgsgA87OIYwQrY2AQ3USIKdDZY6snU,1465
zrb/input/str_input.py,sha256=NevZHX9rf1g8eMatPyy-kUX3DglrVAQpzvVpKAzf7bA,81
zrb/input/text_input.py,sha256=shvVbc2U8Is36h23M5lcW8IEwKc9FR-4uEPZZroj3rU,3377
+ zrb/llm_config.py,sha256=SXSkDpmXxGLJaoUrT09oNdOGwHXc82TwIGssVeo6S7U,1553
zrb/runner/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
zrb/runner/cli.py,sha256=HtfJQecFg2keIMK-7bVATBlTeZC_RvT8DsbUC58TMKU,6736
zrb/runner/common_util.py,sha256=0zhZn1Jdmr194_nsL5_L-Kn9-_NDpMTI2z6_LXUQJ-U,1369
@@ -299,7 +300,7 @@ zrb/task/base_task.py,sha256=SQRf37bylS586KwyW0eYDe9JZ5Hl18FP8kScHae6y3A,21251
zrb/task/base_trigger.py,sha256=jC722rDvodaBLeNaFghkTyv1u0QXrK6BLZUUqcmBJ7Q,4581
zrb/task/cmd_task.py,sha256=pUKRSR4DZKjbmluB6vi7cxqyhxOLfJ2czSpYeQbiDvo,10705
zrb/task/http_check.py,sha256=Gf5rOB2Se2EdizuN9rp65HpGmfZkGc-clIAlHmPVehs,2565
- zrb/task/llm_task.py,sha256=B4qhza-4fk7odI7-rv2rLYvBLt1dmZMNgKu8OK7rajM,11849
+ zrb/task/llm_task.py,sha256=Gf_Y8e3-U46wjnH5K36I1XJnFwwU-eTQlG5JL87UobM,13495
zrb/task/make_task.py,sha256=PD3b_aYazthS8LHeJsLAhwKDEgdurQZpymJDKeN60u0,2265
zrb/task/rsync_task.py,sha256=GSL9144bmp6F0EckT6m-2a1xG25AzrrWYzH4k3SVUKM,6370
zrb/task/scaffolder.py,sha256=rME18w1HJUHXgi9eTYXx_T2G4JdqDYzBoNOkdOOo5-o,6806
@@ -340,7 +341,7 @@ zrb/util/string/name.py,sha256=8picJfUBXNpdh64GNaHv3om23QHhUZux7DguFLrXHp8,1163
zrb/util/todo.py,sha256=1nDdwPc22oFoK_1ZTXyf3638Bg6sqE2yp_U4_-frHoc,16015
zrb/xcom/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
zrb/xcom/xcom.py,sha256=o79rxR9wphnShrcIushA0Qt71d_p3ZTxjNf7x9hJB78,1571
- zrb-1.3.1.dist-info/METADATA,sha256=xYoLV2fDDSGBjJdgYOjXNy1rfAifx7VsGxIAGHCpBns,6303
- zrb-1.3.1.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- zrb-1.3.1.dist-info/entry_points.txt,sha256=-Pg3ElWPfnaSM-XvXqCxEAa-wfVI6BEgcs386s8C8v8,46
- zrb-1.3.1.dist-info/RECORD,,
+ zrb-1.4.0.dist-info/METADATA,sha256=9IBDp38-en6FVpqq2RUd4qMo7c138HImysJXzd6-kwg,6291
+ zrb-1.4.0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ zrb-1.4.0.dist-info/entry_points.txt,sha256=-Pg3ElWPfnaSM-XvXqCxEAa-wfVI6BEgcs386s8C8v8,46
+ zrb-1.4.0.dist-info/RECORD,,