roast-ai 0.4.10 → 0.5.1
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/.claude/commands/docs/write-comments.md +36 -0
- data/.github/CODEOWNERS +1 -1
- data/.github/workflows/ci.yaml +10 -6
- data/.gitignore +0 -1
- data/.rubocop.yml +7 -1
- data/.ruby-version +1 -1
- data/CLAUDE.md +2 -2
- data/CONTRIBUTING.md +2 -0
- data/Gemfile +19 -18
- data/Gemfile.lock +35 -58
- data/README.md +118 -1432
- data/README_LEGACY.md +1464 -0
- data/Rakefile +39 -4
- data/dev.yml +29 -0
- data/dsl/agent_sessions.rb +20 -0
- data/dsl/async_cogs.rb +49 -0
- data/dsl/async_cogs_complex.rb +67 -0
- data/dsl/call.rb +44 -0
- data/dsl/collect_from.rb +72 -0
- data/dsl/json_output.rb +28 -0
- data/dsl/map.rb +55 -0
- data/dsl/map_reduce.rb +37 -0
- data/dsl/map_with_index.rb +49 -0
- data/dsl/next_break.rb +45 -0
- data/dsl/next_break_parallel.rb +44 -0
- data/dsl/outputs.rb +39 -0
- data/dsl/outputs_bang.rb +36 -0
- data/dsl/parallel_map.rb +37 -0
- data/dsl/prompts/simple_prompt.md.erb +3 -0
- data/dsl/prototype.rb +5 -7
- data/dsl/repeat_loop_results.rb +53 -0
- data/dsl/ruby_cog.rb +72 -0
- data/dsl/simple_agent.rb +18 -0
- data/dsl/simple_chat.rb +15 -1
- data/dsl/simple_repeat.rb +29 -0
- data/dsl/skip.rb +36 -0
- data/dsl/step_communication.rb +2 -3
- data/dsl/targets_and_params.rb +57 -0
- data/dsl/temperature.rb +17 -0
- data/dsl/temporary_directory.rb +22 -0
- data/dsl/tutorial/01_your_first_workflow/README.md +179 -0
- data/dsl/tutorial/01_your_first_workflow/configured_chat.rb +33 -0
- data/dsl/tutorial/01_your_first_workflow/hello.rb +23 -0
- data/dsl/tutorial/02_chaining_cogs/README.md +310 -0
- data/dsl/tutorial/02_chaining_cogs/code_review.rb +104 -0
- data/dsl/tutorial/02_chaining_cogs/session_resumption.rb +92 -0
- data/dsl/tutorial/02_chaining_cogs/simple_chain.rb +84 -0
- data/dsl/tutorial/03_targets_and_params/README.md +230 -0
- data/dsl/tutorial/03_targets_and_params/multiple_targets.rb +65 -0
- data/dsl/tutorial/03_targets_and_params/single_target.rb +65 -0
- data/dsl/tutorial/04_configuration_options/README.md +209 -0
- data/dsl/tutorial/04_configuration_options/control_display_and_temperature.rb +104 -0
- data/dsl/tutorial/04_configuration_options/simple_config.rb +68 -0
- data/dsl/tutorial/05_control_flow/README.md +156 -0
- data/dsl/tutorial/05_control_flow/conditional_execution.rb +62 -0
- data/dsl/tutorial/05_control_flow/handling_failures.rb +77 -0
- data/dsl/tutorial/06_reusable_scopes/README.md +172 -0
- data/dsl/tutorial/06_reusable_scopes/accessing_scope_outputs.rb +126 -0
- data/dsl/tutorial/06_reusable_scopes/basic_scope.rb +63 -0
- data/dsl/tutorial/06_reusable_scopes/parameterized_scope.rb +78 -0
- data/dsl/tutorial/07_processing_collections/README.md +152 -0
- data/dsl/tutorial/07_processing_collections/basic_map.rb +70 -0
- data/dsl/tutorial/07_processing_collections/parallel_map.rb +74 -0
- data/dsl/tutorial/08_iterative_workflows/README.md +231 -0
- data/dsl/tutorial/08_iterative_workflows/basic_repeat.rb +57 -0
- data/dsl/tutorial/08_iterative_workflows/conditional_break.rb +57 -0
- data/dsl/tutorial/09_async_cogs/README.md +197 -0
- data/dsl/tutorial/09_async_cogs/basic_async.rb +38 -0
- data/dsl/tutorial/README.md +222 -0
- data/dsl/working_directory.rb +16 -0
- data/exe/roast +1 -1
- data/internal/documentation/architectural-notes.md +115 -0
- data/internal/documentation/doc-comments-external.md +686 -0
- data/internal/documentation/doc-comments-internal.md +342 -0
- data/internal/documentation/doc-comments.md +211 -0
- data/lib/roast/dsl/cog/config.rb +274 -3
- data/lib/roast/dsl/cog/input.rb +53 -10
- data/lib/roast/dsl/cog/output.rb +297 -8
- data/lib/roast/dsl/cog/registry.rb +35 -3
- data/lib/roast/dsl/cog/stack.rb +1 -1
- data/lib/roast/dsl/cog/store.rb +5 -5
- data/lib/roast/dsl/cog.rb +70 -14
- data/lib/roast/dsl/cog_input_context.rb +36 -1
- data/lib/roast/dsl/cog_input_manager.rb +116 -7
- data/lib/roast/dsl/cogs/agent/config.rb +465 -0
- data/lib/roast/dsl/cogs/agent/input.rb +81 -0
- data/lib/roast/dsl/cogs/agent/output.rb +59 -0
- data/lib/roast/dsl/cogs/agent/provider.rb +51 -0
- data/lib/roast/dsl/cogs/agent/providers/claude/claude_invocation.rb +185 -0
- data/lib/roast/dsl/cogs/agent/providers/claude/message.rb +73 -0
- data/lib/roast/dsl/cogs/agent/providers/claude/messages/assistant_message.rb +36 -0
- data/lib/roast/dsl/cogs/agent/providers/claude/messages/result_message.rb +61 -0
- data/lib/roast/dsl/cogs/agent/providers/claude/messages/system_message.rb +47 -0
- data/lib/roast/dsl/cogs/agent/providers/claude/messages/text_message.rb +36 -0
- data/lib/roast/dsl/cogs/agent/providers/claude/messages/tool_result_message.rb +47 -0
- data/lib/roast/dsl/cogs/agent/providers/claude/messages/tool_use_message.rb +46 -0
- data/lib/roast/dsl/cogs/agent/providers/claude/messages/unknown_message.rb +27 -0
- data/lib/roast/dsl/cogs/agent/providers/claude/messages/user_message.rb +37 -0
- data/lib/roast/dsl/cogs/agent/providers/claude/tool_result.rb +51 -0
- data/lib/roast/dsl/cogs/agent/providers/claude/tool_use.rb +48 -0
- data/lib/roast/dsl/cogs/agent/providers/claude.rb +31 -0
- data/lib/roast/dsl/cogs/agent/stats.rb +92 -0
- data/lib/roast/dsl/cogs/agent/usage.rb +62 -0
- data/lib/roast/dsl/cogs/agent.rb +75 -0
- data/lib/roast/dsl/cogs/chat/config.rb +453 -0
- data/lib/roast/dsl/cogs/chat/input.rb +92 -0
- data/lib/roast/dsl/cogs/chat/output.rb +64 -0
- data/lib/roast/dsl/cogs/chat/session.rb +68 -0
- data/lib/roast/dsl/cogs/chat.rb +59 -56
- data/lib/roast/dsl/cogs/cmd.rb +251 -61
- data/lib/roast/dsl/cogs/ruby.rb +171 -0
- data/lib/roast/dsl/command_runner.rb +191 -0
- data/lib/roast/dsl/config_manager.rb +58 -11
- data/lib/roast/dsl/control_flow.rb +41 -0
- data/lib/roast/dsl/execution_manager.rb +162 -32
- data/lib/roast/dsl/nil_assertions.rb +23 -0
- data/lib/roast/dsl/system_cog/params.rb +32 -0
- data/lib/roast/dsl/system_cog.rb +36 -0
- data/lib/roast/dsl/system_cogs/call.rb +163 -0
- data/lib/roast/dsl/system_cogs/map.rb +454 -0
- data/lib/roast/dsl/system_cogs/repeat.rb +242 -0
- data/lib/roast/dsl/workflow.rb +26 -16
- data/lib/roast/dsl/workflow_context.rb +20 -0
- data/lib/roast/dsl/workflow_params.rb +24 -0
- data/lib/roast/helpers/minitest_coverage_runner.rb +1 -1
- data/lib/roast/sorbet_runtime_stub.rb +154 -0
- data/lib/roast/tools/apply_diff.rb +1 -3
- data/lib/roast/tools/cmd.rb +4 -3
- data/lib/roast/tools/read_file.rb +1 -1
- data/lib/roast/tools/update_files.rb +1 -1
- data/lib/roast/tools/write_file.rb +1 -1
- data/lib/roast/version.rb +1 -1
- data/lib/roast/workflow/base_workflow.rb +4 -0
- data/lib/roast/workflow/step_loader.rb +14 -2
- data/lib/roast-ai.rb +4 -0
- data/lib/roast.rb +58 -21
- data/{roast.gemspec → roast-ai.gemspec} +9 -13
- data/sorbet/rbi/gems/async@2.34.0.rbi +1577 -0
- data/sorbet/rbi/gems/cli-kit@5.2.0.rbi +2063 -0
- data/sorbet/rbi/gems/{cli-ui@2.3.0.rbi → cli-ui@2.7.0-6bdefd1d06305e5d6ae312ac76f9c88f88658dda.rbi} +1418 -1013
- data/sorbet/rbi/gems/console@1.34.2.rbi +1193 -0
- data/sorbet/rbi/gems/fiber-annotation@0.2.0.rbi +50 -0
- data/sorbet/rbi/gems/fiber-local@1.1.0.rbi +35 -0
- data/sorbet/rbi/gems/fiber-storage@1.0.1.rbi +41 -0
- data/sorbet/rbi/gems/io-event@1.14.0.rbi +724 -0
- data/sorbet/rbi/gems/metrics@0.15.0.rbi +9 -0
- data/sorbet/rbi/gems/traces@0.18.2.rbi +9 -0
- data/sorbet/rbi/shims/lib/roast/dsl/cog_input_context.rbi +1185 -5
- data/sorbet/rbi/shims/lib/roast/dsl/config_context.rbi +311 -5
- data/sorbet/rbi/shims/lib/roast/dsl/execution_context.rbi +486 -5
- data/sorbet/tapioca/config.yml +6 -0
- data/sorbet/tapioca/require.rb +2 -0
- metadata +157 -30
- data/dsl/less_simple.rb +0 -112
- data/dsl/scoped_executors.rb +0 -28
- data/dsl/simple.rb +0 -8
- data/lib/roast/dsl/cogs/execute.rb +0 -46
- data/lib/roast/dsl/cogs/graph.rb +0 -53
- data/sorbet/rbi/gems/cgi@0.5.0.rbi +0 -2961
- data/sorbet/rbi/gems/claude_swarm@0.1.19.rbi +0 -568
- data/sorbet/rbi/gems/cli-kit@5.0.1.rbi +0 -1991
- data/sorbet/rbi/gems/dry-configurable@1.3.0.rbi +0 -672
- data/sorbet/rbi/gems/dry-core@1.1.0.rbi +0 -1894
- data/sorbet/rbi/gems/dry-inflector@1.2.0.rbi +0 -659
- data/sorbet/rbi/gems/dry-initializer@3.2.0.rbi +0 -781
- data/sorbet/rbi/gems/dry-logic@1.6.0.rbi +0 -1127
- data/sorbet/rbi/gems/dry-schema@1.14.1.rbi +0 -3727
- data/sorbet/rbi/gems/dry-types@1.8.3.rbi +0 -3969
- data/sorbet/rbi/gems/fast-mcp-annotations@1.5.3.rbi +0 -1588
- data/sorbet/rbi/gems/mime-types-data@3.2025.0617.rbi +0 -136
- data/sorbet/rbi/gems/mime-types@3.7.0.rbi +0 -1342
- data/sorbet/rbi/gems/rack@2.2.19.rbi +0 -5676
- data/sorbet/rbi/gems/yard-sorbet@0.9.0.rbi +0 -435
- data/sorbet/rbi/gems/yard@0.9.37.rbi +0 -18492

data/lib/roast/dsl/cogs/chat/config.rb (new file)
@@ -0,0 +1,453 @@
+# typed: true
+# frozen_string_literal: true
+
+module Roast
+  module DSL
+    module Cogs
+      class Chat < Cog
+        class Config < Cog::Config
+          PROVIDERS = {
+            openai: {
+              api_key_env_var: "OPENAI_API_KEY",
+              base_url_env_var: "OPENAI_API_BASE",
+              default_base_url: "https://api.openai.com/v1",
+              default_model: "gpt-4o-mini",
+            },
+          }.freeze #: Hash[Symbol, Hash[Symbol, String]]
+
+          # Configure the cog to use a specified API provider when invoking the llm
+          #
+          # #### See Also
+          # - `use_default_provider!`
+          #
+          #: (Symbol) -> void
+          def provider(provider)
+            @values[:provider] = provider
+          end
+
+          # Configure the cog to use the default provider when invoking the llm
+          #
+          # The default LLM provider used by Roast is OpenAI (`:openai`).
+          #
+          # #### See Also
+          # - `provider`
+          #
+          #: () -> void
+          def use_default_provider!
+            @values[:provider] = nil
+          end
+
+          # Get the validated provider name that the cog is configured to use when invoking the llm
+          #
+          # Note: this method will return the name of a valid provider or raise an `InvalidConfigError`.
+          # It will __not__, however, validate that the you have access to the provider's API.
+          # If you have not correctly configured API access, you will likely experience a failure when Roast attempts to
+          # run your workflow.
+          #
+          # #### See Also
+          # - `provider`
+          # - `use_default_provider!`
+          #
+          #: () -> Symbol
+          def valid_provider!
+            provider = @values[:provider] || PROVIDERS.keys.first
+            unless PROVIDERS.include?(provider)
+              raise ArgumentError, "'#{provider}' is not a valid provider. Available providers include: #{PROVIDERS.keys.join(", ")}"
+            end
+
+            provider
+          end
+
+          # Configure the cog to use a specific API key when invoking the llm
+          #
+          # By default, the cog will use the value specified in a provider-specific environment variable, if present.
+          #
+          # #### See Also
+          # - `use_api_key_from_environment!`
+          #
+          #: (String) -> void
+          def api_key(key)
+            @values[:api_key] = key
+          end
+
+          # Remove any explicit api key that the cog was configured to use when invoking the llm
+          #
+          # The cog will fall back to the value specified in a provider-specific environment variable, if present.
+          #
+          # #### Environment Variables
+          # - OpenAI Provider: OPENAI_API_KEY
+          #
+          # #### See Also
+          # - `api_key`
+          #
+          #: () -> void
+          def use_api_key_from_environment!
+            @values.delete(:api_key)
+          end
+
+          # Get the validated, configured value of the API key the cog is configured to use when invoking the llm
+          #
+          # This method will raise InvalidConfigError if no api key was provided, neither explicitly nor
+          # via a provider-specific environment variable.
+          #
+          # #### Environment Variables
+          # - OpenAI Provider: OPENAI_API_KEY
+          #
+          # #### See Also
+          # - `api_key`
+          # - `use_api_key_from_environment!`
+          #
+          #: () -> String
+          def valid_api_key!
+            value = @values.fetch(:api_key, ENV[PROVIDERS.dig(valid_provider!, :api_key_env_var).not_nil!])
+            raise InvalidConfigError, "no api key provided" unless value
+
+            value
+          end
+
+          # Configure the cog to use a specific API base URL when invoking the llm
+          #
+          # Default value:
+          # - The value specified in provider-specific environment variable, if present;
+          # - A provider-specific default, otherwise.
+          #
+          # #### See Also
+          # - `use_default_base_url!`
+          #
+          #: (String) -> void
+          def base_url(key)
+            @values[:base_url] = key
+          end
+
+          # Remove any explicit API base URL that the cog was configured to use when invoking the llm
+          #
+          # The cog will fall back to a default value determined as follows:
+          # - The value specified in provider-specific environment variable, if present;
+          # - A provider-specific default, otherwise.
+          #
+          # #### Environment Variables
+          # - OpenAI Provider: OPENAI_API_BASE
+          #
+          # #### See Also
+          # - `base_url`
+          #
+          #: () -> void
+          def use_default_base_url!
+            @values[:base_url] = nil
+          end
+
+          # Get the validated, configured value of the API base URL the cog is configured to use when invoking the llm
+          #
+          # #### Environment Variables
+          # - OpenAI Provider: OPENAI_API_BASE
+          #
+          # #### See Also
+          # - `base_url`
+          # - `use_default_base_url!`
+          #
+          #: () -> String
+          def valid_base_url
+            @values.fetch(:api_key, ENV[PROVIDERS.dig(valid_provider!, :base_url_env_var).not_nil!]) ||
+              PROVIDERS.dig(valid_provider!, :default_base_url)
+          end
+
+          # Configure the cog to use a specific model when invoking the agent
+          #
+          # The model name format is provider-specific.
+          #
+          # #### See Also
+          # - `use_default_model!`
+          #
+          #: (String) -> void
+          def model(model)
+            @values[:model] = model
+          end
+
+          # Configure the cog to use the provider's default model when invoking the agent
+          #
+          # Note: the default model will be different for different providers.
+          #
+          # #### See Also
+          # - `model`
+          #
+          #: () -> void
+          def use_default_model!
+            @values[:model] = nil
+          end
+
+          # Get the validated, configured value of the model the cog is configured to use when running the agent
+          #
+          # Returns the provider's default model if no model was explicitly configured.
+          #
+          # #### See Also
+          # - `model`
+          # - `use_default_model!`
+          #
+          #: () -> String?
+          def valid_model
+            @values.fetch(:model, PROVIDERS.dig(valid_provider!, :default_model))
+          end
+
+          # Configure the cog to use a specific temperature when invoking the llm
+          #
+          # Temperature controls the randomness of the model's responses:
+          # - Low (0.0-0.3): More deterministic and focused responses
+          # - Medium (0.4-0.7): Balanced creativity and coherence
+          # - High (0.8-1.0): More creative and varied responses
+          #
+          # #### See Also
+          # - `use_default_temperature!`
+          #
+          #: (Float) -> void
+          def temperature(value)
+            if value < 0.0 || value > 1.0
+              raise ArgumentError, "temperature must be between 0.0 and 1.0, got #{value}"
+            end
+
+            @values[:temperature] = value.to_f
+          end
+
+          # Remove any explicit temperature configuration
+          #
+          # The cog will fall back to the provider's default temperature.
+          #
+          # #### See Also
+          # - `temperature`
+          #
+          #: () -> void
+          def use_default_temperature!
+            @values.delete(:temperature)
+          end
+
+          # Get the validated, configured temperature value
+          #
+          # Returns `nil` if no temperature was explicitly configured,
+          # which means the provider will use its default.
+          #
+          # #### See Also
+          # - `temperature`
+          # - `use_default_temperature!`
+          #
+          #: () -> Float?
+          def valid_temperature
+            @values[:temperature]
+          end
+
+          # Configure the cog to verify that the model exists on the provider before attempting to invoke it
+          #
+          # Disabled by default.
+          #
+          # #### See Also
+          # - `no_verify_model_exists!`
+          # - `assume_model_exists!`
+          # - `verify_model_exists?`
+          #
+          #: () -> void
+          def verify_model_exists!
+            @values[:verify_model_exists] = true
+          end
+
+          # Configure the cog __not__ to verify that the model exists on the provider before attempting to invoke it
+          #
+          # This is the default behaviour.
+          #
+          # #### See Also
+          # - `verify_model_exists!`
+          # - `assume_model_exists!`
+          # - `verify_model_exists?`
+          #
+          #: () -> void
+          def no_verify_model_exists!
+            @values[:verify_model_exists] = false
+          end
+
+          # Check if the cog is configured to verify that the model exists on the provider
+          #
+          # #### See Also
+          # - `verify_model_exists!`
+          # - `no_verify_model_exists!`
+          # - `assume_model_exists!`
+          # - `verify_model_exists?`
+          #
+          #: () -> bool
+          def verify_model_exists?
+            @values.fetch(:verify_model_exists, false)
+          end
+
+          # Configure the cog to display the prompt when invoking the llm
+          #
+          # Disabled by default.
+          #
+          # #### See Also
+          # - `no_show_prompt!`
+          # - `show_prompt?`
+          # - `display!`
+          #
+          #: () -> void
+          def show_prompt!
+            @values[:show_prompt] = true
+          end
+
+          # Configure the cog __not__ to display the prompt when invoking the llm
+          #
+          # This is the default behaviour.
+          #
+          # #### See Also
+          # - `show_prompt!`
+          # - `show_prompt?`
+          # - `no_display!`
+          # - `quiet!`
+          #
+          #: () -> void
+          def no_show_prompt!
+            @values[:show_prompt] = false
+          end
+
+          # Check if the cog is configured to display the prompt when invoking the llm
+          #
+          # #### See Also
+          # - `show_prompt!`
+          # - `no_show_prompt!`
+          #
+          #: () -> bool
+          def show_prompt?
+            @values.fetch(:show_prompt, false)
+          end
+
+          # Configure the cog to display the llm's final response
+          #
+          # Enabled by default.
+          #
+          # #### See Also
+          # - `no_show_response!`
+          # - `show_response?`
+          # - `display!`
+          #
+          #: () -> void
+          def show_response!
+            @values[:show_response] = true
+          end
+
+          # Configure the cog __not__ to display the llm's final response
+          #
+          # #### See Also
+          # - `show_response!`
+          # - `show_response?`
+          # - `no_display!`
+          # - `quiet!`
+          #
+          #: () -> void
+          def no_show_response!
+            @values[:show_response] = false
+          end
+
+          # Check if the cog is configured to display the llm's final response
+          #
+          # #### See Also
+          # - `show_response!`
+          # - `no_show_response!`
+          #
+          #: () -> bool
+          def show_response?
+            @values.fetch(:show_response, true)
+          end
+
+          # Configure the cog to display statistics about the llm's operation
+          #
+          # Enabled by default.
+          #
+          # #### See Also
+          # - `no_show_stats!`
+          # - `show_stats?`
+          # - `display!`
+          #
+          #: () -> void
+          def show_stats!
+            @values[:show_stats] = true
+          end
+
+          # Configure the cog __not__ to display statistics about the llm's operation
+          #
+          # #### See Also
+          # - `show_stats!`
+          # - `show_stats?`
+          # - `no_display!`
+          # - `quiet!`
+          #
+          #: () -> void
+          def no_show_stats!
+            @values[:show_stats] = false
+          end
+
+          # Check if the cog is configured to display statistics about the llm's operation
+          #
+          # #### See Also
+          # - `show_stats!`
+          # - `no_show_stats!`
+          #
+          #: () -> bool
+          def show_stats?
+            @values.fetch(:show_stats, true)
+          end
+
+          # Configure the cog to display all llm output
+          #
+          # This enables `show_prompt!`, `show_response!`, and `show_stats!`.
+          #
+          # #### See Also
+          # - `no_display!`
+          # - `quiet!`
+          # - `show_prompt!`
+          # - `show_response!`
+          # - `show_stats!`
+          #
+          #: () -> void
+          def display!
+            show_prompt!
+            show_response!
+            show_stats!
+          end
+
+          # Configure the cog to __hide__ all llm output
+          #
+          # This enables `no_show_prompt!`, `no_show_response!`, and `no_show_stats!`.
+          #
+          # #### Alias Methods
+          # - `no_display!`
+          # - `quiet!`
+          #
+          # #### See Also
+          # - `display!`
+          # - `quiet!`
+          # - `no_show_prompt!`
+          # - `no_show_response!`
+          # - `no_show_stats!`
+          #
+          #: () -> void
+          def no_display!
+            no_show_prompt!
+            no_show_response!
+            no_show_stats!
+          end
+
+          # Check if the cog is configured to display any output while running
+          #
+          # #### See Also
+          # - `display!`
+          # - `no_display!`
+          # - `show_prompt?`
+          # - `show_response?`
+          # - `show_stats?`
+          #
+          #: () -> bool
+          def display?
+            show_prompt? || show_response? || show_stats?
+          end
+
+          alias_method(:quiet!, :no_display!)
+          alias_method(:assume_model_exists!, :no_verify_model_exists!)
+        end
+      end
+    end
+  end
+end
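
The `Chat::Config` methods above form the chat cog's configuration surface. As a rough illustration only (how the workflow DSL hands you the config object is not part of this hunk and is assumed from the bundled `dsl/` examples such as `dsl/temperature.rb`), a configuration block might exercise them like this:

    # Hypothetical usage sketch -- `config` stands in for the Chat::Config
    # instance the workflow DSL exposes for a chat cog; only the method calls
    # themselves come from the class above.
    config.provider(:openai)       # the only entry in PROVIDERS as of 0.5.1
    config.model("gpt-4o-mini")    # matches the provider's default_model
    config.temperature(0.2)        # values outside 0.0..1.0 raise ArgumentError
    config.verify_model_exists!    # off by default; assume_model_exists! is an alias for turning it back off
    config.quiet!                  # alias for no_display!: hides prompt, response, and stats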

data/lib/roast/dsl/cogs/chat/input.rb (new file)
@@ -0,0 +1,92 @@
+# typed: true
+# frozen_string_literal: true
+
+module Roast
+  module DSL
+    module Cogs
+      class Chat < Cog
+        # Input specification for the chat cog
+        #
+        # The chat cog requires a prompt that will be sent to the language model for processing.
+        # This enables single-turn interactions with the LLM without maintaining conversation context.
+        class Input < Cog::Input
+          # The prompt to send to the language model for processing
+          #
+          # #### Notes
+          # The chat cog does not maintain any conversational context with the LLM provider.
+          # If you want the LLM to be aware of previous conversational history, you must provide the full
+          # transcript (or relevant subset) in the prompt.
+          #
+          #: String?
+          attr_accessor :prompt
+
+          # Optional session identifier for maintaining conversation context
+          #
+          # When provided, the chat cog will use this session to maintain context across
+          # multiple invocations, allowing for conversational interactions.
+          #
+          # The chat cog will fork a new session from this point, so multiple conversations can be resumed
+          # from the same session state.
+          #
+          #: Session?
+          attr_accessor :session
+
+          # Validate that the input has all required parameters
+          #
+          # This method ensures that a prompt has been provided before the chat cog executes.
+          #
+          # #### See Also
+          # - `coerce`
+          #
+          #: () -> void
+          def validate!
+            valid_prompt!
+          end
+
+          # Coerce the input from the return value of the input block
+          #
+          # If the input block returns a String, it will be used as the prompt value.
+          #
+          # #### See Also
+          # - `validate!`
+          #
+          #: (untyped) -> void
+          def coerce(input_return_value)
+            if input_return_value.is_a?(String)
+              self.prompt = input_return_value
+            end
+          end
+
+          # Get the validated prompt value
+          #
+          # Returns the prompt if it is present, otherwise raises an `InvalidInputError`.
+          #
+          # #### See Also
+          # - `prompt`
+          # - `validate!`
+          #
+          #: () -> String
+          def valid_prompt!
+            valid_prompt = @prompt
+            raise Cog::Input::InvalidInputError, "'prompt' is required" unless valid_prompt.present?
+
+            valid_prompt
+          end
+
+          # Get the session value if one was provided
+          #
+          # Returns the session object if present, otherwise returns `nil`.
+          # This method does not raise an error when the session is absent; providing a session is optional.
+          #
+          # #### See Also
+          # - `session`
+          #
+          #: () -> Session?
+          def valid_session
+            @session
+          end
+        end
+      end
+    end
+  end
+end
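
A minimal sketch of the `Chat::Input` contract shown above; constructing the object directly with no arguments is an assumption, since `Cog::Input`'s initializer lives in `lib/roast/dsl/cog/input.rb`, which this diff only summarizes:

    # Hedged sketch: Input.new with no arguments is assumed, not confirmed by this diff.
    input = Roast::DSL::Cogs::Chat::Input.new
    input.coerce("Summarize the changes in roast-ai 0.5.1")  # a String return value becomes the prompt
    input.validate!                                           # raises InvalidInputError if no prompt is present
    input.valid_prompt!   # => "Summarize the changes in roast-ai 0.5.1"
    input.valid_session   # => nil (providing a session is optional)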

data/lib/roast/dsl/cogs/chat/output.rb (new file)
@@ -0,0 +1,64 @@
+# typed: true
+# frozen_string_literal: true
+
+module Roast
+  module DSL
+    module Cogs
+      class Chat < Cog
+        # Output from running the chat cog
+        #
+        # Contains the LLM's response text from a chat completion request.
+        # The output provides convenient access to the response as plain text, parsed JSON,
+        # or as an array of lines through the included `WithText` and `WithJson` modules.
+        class Output < Cog::Output
+          include Cog::Output::WithJson
+          include Cog::Output::WithNumber
+          include Cog::Output::WithText
+
+          # The LLM's response text
+          #
+          # This is the complete text response returned by the language model for the chat request.
+          # The response can be accessed directly, or through convenience methods like `text`,
+          # `lines`, `json`, or `json!` provided by the included modules.
+          #
+          # #### See Also
+          # - `text` (from WithText module)
+          # - `lines` (from WithText module)
+          # - `json` (from WithJson module)
+          # - `json!` (from WithJson module)
+          #
+          #: String
+          attr_reader :response
+
+          # The session object containing the conversation context
+          #
+          # This holds a reference to the complete message history needed to resume or continue a conversation
+          # with the language model. The session can be passed to subsequent `chat` cog invocations
+          # to maintain conversational context.
+          #
+          # Note: you do __not__ have to use the same model for the entire conversation.
+          # You can change models between prompts while maintaining the same session, allowing
+          # different models to participate in the same conversation.
+          #
+          #: Session
+          attr_reader :session
+
+          # Initialize a new chat output with the session and response text
+          #
+          #: (Session, String) -> void
+          def initialize(session, response)
+            super()
+            @session = session
+            @response = response
+          end
+
+          private
+
+          def raw_text
+            response
+          end
+        end
+      end
+    end
+  end
+end
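
Consuming a chat result might look like the following; `session` stands in for a `Chat::Session`, and the `text`/`lines`/`json` helpers are named in the doc comments above but defined in `lib/roast/dsl/cog/output.rb`, so their exact behaviour is assumed here:

    # Hedged sketch of reading a Chat::Output.
    output = Roast::DSL::Cogs::Chat::Output.new(session, '{"status":"ok"}')
    output.response   # => '{"status":"ok"}'  (raw model text)
    output.session    # => Session carrying the message history needed to resume the conversation
    output.json       # assumed to parse the response, per the WithJson doc comments above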

data/lib/roast/dsl/cogs/chat/session.rb (new file)
@@ -0,0 +1,68 @@
+# typed: true
+# frozen_string_literal: true
+
+module Roast
+  module DSL
+    module Cogs
+      class Chat < Cog
+        # Container for chat session information needed to resume a conversation
+        #
+        # Holds the messages from a chat conversation and provides methods to
+        # truncate or restore the session.
+        class Session
+          class << self
+            # Create a new session from a RubyLLM chat instance
+            #
+            #: (RubyLLM::Chat) -> Session
+            def from_chat(chat)
+              messages = chat.messages.deep_dup
+              Session.new(messages)
+            end
+          end
+
+          # Initialize a new session with the given messages
+          #
+          #: (Array[RubyLLM::Message]) -> void
+          def initialize(messages)
+            @messages = messages
+          end
+
+          # Get a truncated session consisting only of the first N messages
+          #
+          # Each full turn in a conversation consists of two messages (a prompt and a response),
+          # so to include N full turns you should pass `2 * N` as the argument.
+          # The default value is `2`, which returns only the first full turn.
+          #
+          #: (?Integer) -> Session
+          def first(n = 2)
+            messages = @messages.first(n).deep_dup
+            Session.new(messages)
+          end
+
+          # Get a truncated session consisting only of the last N messages
+          #
+          # Each full turn in a conversation consists of two messages (a prompt and a response),
+          # so to include N full turns you should pass `2 * N` as the argument.
+          # The default value is `2`, which returns only the last full turn.
+          #
+          #: (?Integer) -> Session
+          def last(n = 2)
+            messages = @messages.last(n).deep_dup
+            Session.new(messages)
+          end
+
+          # Apply this session's messages to a RubyLLM chat instance
+          #
+          # Replaces the chat's messages with this session's messages, effectively
+          # restoring the conversation state.
+          #
+          #: (RubyLLM::Chat) -> void
+          def apply!(chat)
+            chat.instance_variable_set(:@messages, @messages.deep_dup)
+            chat.with_temperature(@temperature) if @temperature
+          end
+        end
+      end
+    end
+  end
+end
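
Session truncation and resumption, based only on the API above; `chat` and `other_chat` stand in for RubyLLM::Chat instances, which the workflow obtains elsewhere:

    # Hedged sketch of forking and restoring a conversation with Chat::Session.
    session    = Roast::DSL::Cogs::Chat::Session.from_chat(chat)  # deep-copies chat.messages
    first_turn = session.first       # first prompt/response pair (2 messages)
    last_turns = session.last(4)     # last two full turns
    first_turn.apply!(other_chat)    # replaces other_chat's messages with the truncated history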