vsm 0.0.1 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.claude/settings.local.json +17 -0
- data/CLAUDE.md +134 -0
- data/README.md +675 -17
- data/Rakefile +1 -5
- data/examples/01_echo_tool.rb +51 -0
- data/examples/02_openai_streaming.rb +73 -0
- data/examples/02b_anthropic_streaming.rb +58 -0
- data/examples/02c_gemini_streaming.rb +60 -0
- data/examples/03_openai_tools.rb +106 -0
- data/examples/03b_anthropic_tools.rb +93 -0
- data/examples/03c_gemini_tools.rb +95 -0
- data/examples/05_mcp_server_and_chattty.rb +63 -0
- data/examples/06_mcp_mount_reflection.rb +45 -0
- data/examples/07_connect_claude_mcp.rb +78 -0
- data/examples/08_custom_chattty.rb +63 -0
- data/examples/09_mcp_with_llm_calls.rb +49 -0
- data/examples/10_meta_read_only.rb +56 -0
- data/exe/vsm +17 -0
- data/lib/vsm/async_channel.rb +44 -0
- data/lib/vsm/capsule.rb +46 -0
- data/lib/vsm/cli.rb +78 -0
- data/lib/vsm/drivers/anthropic/async_driver.rb +210 -0
- data/lib/vsm/drivers/family.rb +16 -0
- data/lib/vsm/drivers/gemini/async_driver.rb +149 -0
- data/lib/vsm/drivers/openai/async_driver.rb +202 -0
- data/lib/vsm/dsl.rb +80 -0
- data/lib/vsm/dsl_mcp.rb +36 -0
- data/lib/vsm/executors/fiber_executor.rb +10 -0
- data/lib/vsm/executors/thread_executor.rb +19 -0
- data/lib/vsm/generator/new_project.rb +154 -0
- data/lib/vsm/generator/templates/Gemfile.erb +9 -0
- data/lib/vsm/generator/templates/README_md.erb +40 -0
- data/lib/vsm/generator/templates/Rakefile.erb +5 -0
- data/lib/vsm/generator/templates/bin_console.erb +11 -0
- data/lib/vsm/generator/templates/bin_setup.erb +7 -0
- data/lib/vsm/generator/templates/exe_name.erb +34 -0
- data/lib/vsm/generator/templates/gemspec.erb +24 -0
- data/lib/vsm/generator/templates/gitignore.erb +10 -0
- data/lib/vsm/generator/templates/lib_name_rb.erb +9 -0
- data/lib/vsm/generator/templates/lib_organism_rb.erb +44 -0
- data/lib/vsm/generator/templates/lib_ports_chat_tty_rb.erb +12 -0
- data/lib/vsm/generator/templates/lib_tools_read_file_rb.erb +32 -0
- data/lib/vsm/generator/templates/lib_version_rb.erb +6 -0
- data/lib/vsm/homeostat.rb +19 -0
- data/lib/vsm/lens/event_hub.rb +73 -0
- data/lib/vsm/lens/server.rb +188 -0
- data/lib/vsm/lens/stats.rb +58 -0
- data/lib/vsm/lens/tui.rb +88 -0
- data/lib/vsm/lens.rb +79 -0
- data/lib/vsm/mcp/client.rb +80 -0
- data/lib/vsm/mcp/jsonrpc.rb +92 -0
- data/lib/vsm/mcp/remote_tool_capsule.rb +35 -0
- data/lib/vsm/message.rb +6 -0
- data/lib/vsm/meta/snapshot_builder.rb +121 -0
- data/lib/vsm/meta/snapshot_cache.rb +25 -0
- data/lib/vsm/meta/support.rb +35 -0
- data/lib/vsm/meta/tools.rb +498 -0
- data/lib/vsm/meta.rb +59 -0
- data/lib/vsm/observability/ledger.rb +25 -0
- data/lib/vsm/port.rb +11 -0
- data/lib/vsm/ports/chat_tty.rb +112 -0
- data/lib/vsm/ports/mcp/server_stdio.rb +101 -0
- data/lib/vsm/roles/coordination.rb +49 -0
- data/lib/vsm/roles/governance.rb +9 -0
- data/lib/vsm/roles/identity.rb +11 -0
- data/lib/vsm/roles/intelligence.rb +172 -0
- data/lib/vsm/roles/operations.rb +33 -0
- data/lib/vsm/runtime.rb +18 -0
- data/lib/vsm/tool/acts_as_tool.rb +20 -0
- data/lib/vsm/tool/capsule.rb +12 -0
- data/lib/vsm/tool/descriptor.rb +16 -0
- data/lib/vsm/version.rb +1 -1
- data/lib/vsm.rb +43 -0
- data/llms.txt +322 -0
- data/mcp_update.md +162 -0
- metadata +93 -31
- data/.rubocop.yml +0 -8
|
@@ -0,0 +1,149 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
require "async"
|
|
3
|
+
require "net/http"
|
|
4
|
+
require "uri"
|
|
5
|
+
require "json"
|
|
6
|
+
require "securerandom"
|
|
7
|
+
|
|
8
|
+
module VSM
  module Drivers
    module Gemini
      # Driver for Google's Gemini generateContent REST API (API-key auth).
      #
      # Translates a neutral conversation into Gemini "contents" and emits
      # three neutral events via the block passed to #run!:
      #   :assistant_delta  - incremental text chunk (streaming mode only)
      #   :assistant_final  - accumulated/complete text for the turn
      #   :tool_calls       - array of {id:, name:, arguments:} when the model
      #                       returns functionCall parts
      class AsyncDriver
        # api_key:   Google API key (sent as a query parameter, per Gemini's scheme)
        # model:     model name interpolated into the endpoint path
        # base_url:  API root; override for proxies or test servers
        # streaming: true => SSE streamGenerateContent; false => one-shot generateContent
        def initialize(api_key:, model:, base_url: "https://generativelanguage.googleapis.com/v1beta", streaming: true)
          @api_key, @model, @base, @streaming = api_key, model, base_url, streaming
        end

        # Runs one model turn. Emits events through +emit+ and returns :done.
        # policy[:system_prompt], when present, becomes the system_instruction.
        def run!(conversation:, tools:, policy: {}, &emit)
          contents = to_gemini_contents(conversation)
          fndecls  = normalize_gemini_tools(tools)
          body     = request_body(contents, fndecls, policy)
          if @streaming
            stream_turn(body, &emit)
          else
            blocking_turn(body, &emit)
          end
          :done
        end

        private
        # (no IPv6/IPv4 forcing; rely on default Internet)

        # SSE streaming turn: parses "data:" lines as they arrive and emits
        # deltas; at end of body, emits either the collected tool calls or the
        # accumulated final text.
        def stream_turn(body, &emit)
          # NOTE(review): the API key travels in the query string (Gemini's
          # documented scheme); consider the x-goog-api-key header to keep it
          # out of logs/proxies.
          uri = URI.parse("#{@base}/models/#{@model}:streamGenerateContent?alt=sse&key=#{@api_key}")
          http, req = build_post(uri, { "content-type" => "application/json", "accept" => "text/event-stream" }, body)
          http.request(req) do |res|
            if res.code.to_i != 200
              # Drain the body so we can surface a bounded error snippet.
              err = +""; res.read_body { |c| err << c }
              emit.call(:assistant_final, "Gemini HTTP #{res.code}: #{err.to_s.byteslice(0, 400)}")
              next
            end
            buffer = +""; text = +""; calls = []
            res.read_body do |chunk|
              buffer << chunk
              # FIX: reuse extract_sse_line! instead of duplicating its
              # buffer-splitting logic inline (it previously existed unused).
              while (line = extract_sse_line!(buffer))
                next unless line.start_with?("data:")
                data = line.sub("data:", "").strip
                next if data.empty? || data == "[DONE]"
                obj = JSON.parse(data) rescue nil # best-effort: skip malformed frames
                next unless obj
                parts = (obj.dig("candidates", 0, "content", "parts") || [])
                parts.each do |p|
                  if (t = p["text"]) && !t.empty?
                    text << t
                    emit.call(:assistant_delta, t)
                  end
                  if (fc = p["functionCall"]) && fc["name"]
                    # Gemini does not supply call ids; mint one for correlation.
                    calls << { id: SecureRandom.uuid, name: fc["name"], arguments: (fc["args"] || {}) }
                  end
                end
              end
            end
            if calls.any?
              emit.call(:tool_calls, calls)
            else
              emit.call(:assistant_final, text)
            end
          end
        end

        # Non-streaming turn: one POST, then emit tool calls or the joined text.
        def blocking_turn(body, &emit)
          uri = URI.parse("#{@base}/models/#{@model}:generateContent?key=#{@api_key}")
          http, req = build_post(uri, { "content-type" => "application/json" }, body)
          res = http.request(req)
          if res.code.to_i != 200
            # FIX: include a bounded error-body snippet, consistent with the
            # streaming branch (previously only the status code was reported).
            emit.call(:assistant_final, "Gemini HTTP #{res.code}: #{res.body.to_s.byteslice(0, 400)}")
          else
            data = JSON.parse(res.body) rescue {} # best-effort: treat garbage as empty
            parts = (data.dig("candidates", 0, "content", "parts") || [])
            calls = parts.filter_map { |p| fc = p["functionCall"]; fc && { id: SecureRandom.uuid, name: fc["name"], arguments: fc["args"] || {} } }
            if calls.any?
              emit.call(:tool_calls, calls)
            else
              text = parts.filter_map { |p| p["text"] }.join
              emit.call(:assistant_final, text.to_s)
            end
          end
        end

        # Builds the shared JSON request body for both modes.
        def request_body(contents, fndecls, policy)
          JSON.dump({
            contents: contents,
            system_instruction: (policy[:system_prompt] && { parts: [{ text: policy[:system_prompt] }], role: "user" }),
            tools: [{ functionDeclarations: fndecls }]
          })
        end

        # Prepares a Net::HTTP client and POST request (shared by both modes).
        def build_post(uri, headers, body)
          http = Net::HTTP.new(uri.host, uri.port)
          http.use_ssl = (uri.scheme == "https")
          req = Net::HTTP::Post.new(uri.request_uri)
          headers.each { |k, v| req[k] = v }
          req.body = body
          [http, req]
        end

        def normalize_gemini_tools(tools)
          Array(tools).map { |t| normalize_gemini_tool(t) }
        end

        # Accepts a descriptor object (responding to #to_gemini_tool), an
        # already-provider-shaped hash, or a neutral hash, and returns a
        # Gemini functionDeclaration hash. Raises TypeError otherwise.
        def normalize_gemini_tool(t)
          return t.to_gemini_tool if t.respond_to?(:to_gemini_tool)

          # Provider-shaped: { name:, description:, parameters: {…} }
          if t.is_a?(Hash) && (t[:parameters] || t["parameters"])
            return t
          end

          # Neutral hash {name:, description:, schema:}
          if t.is_a?(Hash) && (t[:name] || t["name"])
            return {
              name: t[:name] || t["name"],
              description: t[:description] || t["description"] || "",
              parameters: t[:schema] || t["schema"] || {}
            }
          end

          raise TypeError, "unsupported tool descriptor: #{t.inspect}"
        end

        # Converts the neutral conversation into Gemini "contents" entries.
        # Assistant turns map to role "model"; tool results become
        # functionResponse parts on a "user" turn.
        def to_gemini_contents(neutral)
          items = []
          neutral.each do |m|
            case m[:role]
            when "user"
              items << { role: "user", parts: [{ text: m[:content].to_s }] }
            when "assistant"
              items << { role: "model", parts: [{ text: m[:content].to_s }] }
            when "assistant_tool_calls"
              # Gemini doesn't need us to echo previous functionCall(s)
              # Skip: model will remember its own functionCall
            when "tool_result"
              # Provide functionResponse so model can continue
              name = m[:name] || "tool"
              items << { role: "user", parts: [{ functionResponse: { name: name, response: { content: m[:content].to_s } } }] }
            end
          end
          items
        end

        # Destructively pops one newline-terminated line off +buffer+,
        # chomped; returns nil when no complete line is buffered yet.
        def extract_sse_line!(buffer)
          if (i = buffer.index("\n"))
            line = buffer.slice!(0..i)
            line.chomp!
            return line
          end
          nil
        end
      end
    end
  end
end
|
|
149
|
+
|
|
@@ -0,0 +1,202 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
require "async"
|
|
3
|
+
require "async/http/internet"
|
|
4
|
+
require "json"
|
|
5
|
+
|
|
6
|
+
module VSM
  module Drivers
    module OpenAI
      # Streaming driver for the OpenAI Chat Completions API over SSE.
      #
      # Emits three neutral events via the block passed to #run!:
      #   :assistant_delta  - incremental text chunk as it streams in
      #   :assistant_final  - accumulated text when the turn finishes
      #   :tool_calls       - array of {id:, name:, arguments:} when the model
      #                       requests function calls
      class AsyncDriver
        # api_key:  bearer token for the Authorization header
        # model:    model name placed in the request body
        # base_url: API root; override for proxies/compatible endpoints
        def initialize(api_key:, model:, base_url: "https://api.openai.com/v1")
          @api_key, @model, @base = api_key, model, base_url
        end

        # NOTE(review): not referenced anywhere in this class — presumably a
        # loop bound enforced by a caller; confirm before removing.
        MAX_TOOL_TURNS = 8

        # Runs one model turn with stream: true and parses the SSE response.
        # Always closes the HTTP client (ensure) and returns :done.
        def run!(conversation:, tools:, policy: {}, &emit)
          internet = Async::HTTP::Internet.new
          begin
            headers = {
              "Authorization" => "Bearer #{@api_key}",
              "Content-Type" => "application/json",
              "Accept" => "text/event-stream"
            }

            messages = to_openai_messages(conversation, policy[:system_prompt])
            tool_list = normalize_openai_tools(tools)

            req_body = JSON.dump({
              model: @model,
              messages: messages,
              tools: tool_list,
              tool_choice: "auto",
              stream: true
            })

            # Debug logging
            if ENV["VSM_DEBUG_STREAM"] == "1"
              $stderr.puts "openai => messages: #{JSON.pretty_generate(messages)}"
              $stderr.puts "openai => tools count: #{tool_list.size}"
            end

            response = internet.post("#{@base}/chat/completions", headers, req_body)

            if response.status != 200
              # Non-200: log the body for the operator, but still emit a final
              # (empty) message so the conversation loop can make progress.
              body = response.read
              warn "openai HTTP #{response.status}: #{body}"
              emit.call(:assistant_final, "")
              return :done
            end

            buffer = +""       # raw bytes not yet split into SSE lines
            text_buffer = +""  # accumulated assistant text for this turn
            # Partial tool calls keyed by stream index; arguments arrive as
            # JSON fragments that are concatenated then parsed at the end.
            tc_partial = Hash.new { |h,k| h[k] = { id: nil, name: nil, args_str: +"" } }

            response.body.each do |chunk|
              buffer << chunk
              while (line = extract_sse_line!(buffer))
                # Skip keep-alive blanks and ":" comment lines.
                next if line.empty? || line.start_with?(":")
                next unless line.start_with?("data:")
                data = line.sub("data:","").strip
                $stderr.puts("openai <= #{data}") if ENV["VSM_DEBUG_STREAM"] == "1"
                next if data == "[DONE]"

                obj = JSON.parse(data) rescue nil # best-effort: skip malformed frames
                next unless obj
                choice = obj.dig("choices",0) || {}
                delta = choice["delta"] || {}

                # Text delta: forward immediately and keep for the final message.
                if (content = delta["content"])
                  text_buffer << content
                  emit.call(:assistant_delta, content)
                end

                # Tool-call deltas: id/name arrive once, argument JSON arrives
                # in fragments; accumulate per index.
                if (tcs = delta["tool_calls"])
                  tcs.each do |tc|
                    idx = tc["index"] || 0
                    cell = tc_partial[idx]
                    cell[:id] ||= tc["id"]
                    fn = tc["function"] || {}
                    cell[:name] ||= fn["name"] if fn["name"]
                    cell[:args_str] << (fn["arguments"] || "")
                  end
                end

                # finish_reason ends the turn: either flush tool calls or the
                # accumulated text.
                if (fr = choice["finish_reason"])
                  case fr
                  when "tool_calls"
                    calls = tc_partial.keys.sort.map do |i|
                      cell = tc_partial[i]
                      {
                        id: cell[:id] || "call_#{i}",
                        name: cell[:name] || "unknown_tool",
                        arguments: safe_json(cell[:args_str])
                      }
                    end
                    tc_partial.clear
                    emit.call(:tool_calls, calls)
                  when "stop", "length", "content_filter"
                    # Truncation/filtering is surfaced the same as a normal stop.
                    emit.call(:assistant_final, text_buffer.dup)
                    text_buffer.clear
                  end
                end
              end
            end
          ensure
            internet.close
          end
          :done
        end

        private
        def normalize_openai_tools(tools)
          Array(tools).map { |t| normalize_openai_tool(t) }
        end

        # Accepts a descriptor object, a provider-shaped hash, or a neutral
        # {name:, description:, schema:} hash; returns an OpenAI tools entry.
        # Raises TypeError for anything else.
        def normalize_openai_tool(t)
          # Case 1: our Descriptor object
          return t.to_openai_tool if t.respond_to?(:to_openai_tool)

          # Case 2: provider-shaped already (OpenAI tools API)
          if (t.is_a?(Hash) && (t[:type] || t["type"]))
            return t
          end

          # Case 3: neutral hash {name:, description:, schema:}
          if t.is_a?(Hash) && (t[:name] || t["name"])
            return {
              type: "function",
              function: {
                name: t[:name] || t["name"],
                description: t[:description] || t["description"] || "",
                parameters: t[:schema] || t["schema"] || {}
              }
            }
          end

          raise TypeError, "unsupported tool descriptor: #{t.inspect}"
        end


        # Converts the neutral conversation into OpenAI chat messages.
        # Previous tool calls are echoed back as assistant tool_calls entries
        # and tool results as role "tool" messages, as the API requires.
        def to_openai_messages(neutral, system_prompt)
          msgs = []
          msgs << { role: "system", content: system_prompt } if system_prompt
          neutral.each do |m|
            case m[:role]
            when "user"
              msgs << { role: "user", content: m[:content].to_s }
            when "assistant"
              msgs << { role: "assistant", content: m[:content].to_s }
            when "assistant_tool_calls"
              msg = {
                role: "assistant",
                tool_calls: Array(m[:tool_calls]).map { |c|
                  {
                    id: c[:id],
                    type: "function",
                    function: {
                      name: c[:name],
                      arguments: JSON.dump(c[:arguments] || {})
                    }
                  }
                }
              }
              msgs << msg
              if ENV["VSM_DEBUG_STREAM"] == "1"
                $stderr.puts "OpenAI: Converting assistant_tool_calls: #{msg[:tool_calls].map{|tc| "#{tc[:function][:name]}(#{tc[:id]})"}.join(', ')}"
              end
            when "tool_result"
              msg = {
                role: "tool",
                tool_call_id: m[:tool_call_id],
                content: m[:content].to_s
              }
              msgs << msg
              if ENV["VSM_DEBUG_STREAM"] == "1"
                $stderr.puts "OpenAI: Converting tool_result(#{m[:tool_call_id]}): #{m[:content].to_s.slice(0, 100)}"
              end
            end
          end
          msgs
        end

        # Destructively pops one newline-terminated line off +buffer+,
        # chomped; returns nil when no complete line is buffered yet.
        def extract_sse_line!(buffer)
          if (i = buffer.index("\n"))
            line = buffer.slice!(0..i)
            line.chomp!
            return line
          end
          nil
        end

        # Parses accumulated tool-call argument JSON; an empty string becomes
        # {} and unparseable input is preserved under "_raw" rather than lost.
        def safe_json(s)
          return {} if s.nil? || s.empty?
          JSON.parse(s)
        rescue JSON::ParserError
          { "_raw" => s }
        end
      end
    end
  end
end
|
|
202
|
+
|
data/lib/vsm/dsl.rb
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require_relative "meta"
|
|
4
|
+
module VSM
  module DSL
    # Declarative builder for assembling a VSM::Capsule out of its roles
    # (identity, governance, coordination, intelligence, operations, and
    # optional monitoring) plus child tool capsules declared under operations.
    class Builder
      def initialize(name)
        @name = name
        @roles = {}
        @children = {}
        @after_build = [] # callbacks run once the capsule exists (used by meta_tools)
      end

      # Role declarations. Each instantiates +klass+ with keyword +args+ and
      # records the constructor args for later meta/snapshot use. The default
      # classes (VSM::Identity etc.) are resolved lazily at call time.
      def identity(klass: VSM::Identity, args: {}) = assign_role(:identity, klass, args)
      def governance(klass: VSM::Governance, args: {}) = assign_role(:governance, klass, args)
      def coordination(klass: VSM::Coordination, args: {}) = assign_role(:coordination, klass, args)
      def intelligence(klass: VSM::Intelligence, args: {}) = assign_role(:intelligence, klass, args)

      # operations may take a block that declares child capsules through
      # ChildrenBuilder (capsule / meta_tools / mcp_server from dsl_mcp.rb).
      def operations(klass: VSM::Operations, args: {}, &blk)
        @roles[:operations] = instantiate(klass, args)
        if blk
          builder = ChildrenBuilder.new(self)
          builder.instance_eval(&blk)
          @children.merge!(builder.result)
        end
      end

      def monitoring(klass: VSM::Monitoring, args: {}) = assign_role(:monitoring, klass, args)

      # Finalizes the capsule: wires governance into children that accept it,
      # constructs the capsule, then runs any after-build hooks against it.
      def build
        # Inject governance into tool capsules if they accept it
        @children.each_value do |child|
          child.governance = @roles[:governance] if child.respond_to?(:governance=)
        end
        capsule = VSM::Capsule.new(name: @name, roles: @roles, children: @children)
        @after_build.each { _1.call(capsule) }
        capsule
      end

      # Collects child capsules declared inside an `operations do ... end` block.
      class ChildrenBuilder
        def initialize(parent)
          @parent = parent
          @children = {}
        end

        # Declares one child capsule by name.
        def capsule(name, klass:, args: {})
          instance = klass.new(**args)
          VSM::Meta::Support.record_constructor_args(instance, args)
          @children[name.to_s] = instance
        end

        # Defers attaching the meta tool set until the capsule exists.
        def meta_tools(prefix: "", only: nil, except: nil)
          @parent.__send__(:after_build) do |capsule|
            VSM::Meta.attach!(capsule, prefix: prefix, only: only, except: except)
          end
          result
        end

        def result = @children

        # NOTE(review): unknown DSL calls are silently absorbed and return the
        # children hash — presumably to keep the DSL forgiving, but it also
        # hides typos in block bodies; confirm this leniency is intended.
        def method_missing(*) = result
        def respond_to_missing?(*) = true
      end

      private

      # Registers a hook to run against the built capsule (see #build).
      def after_build(&block)
        @after_build << block if block
      end

      def assign_role(key, klass, args)
        @roles[key] = instantiate(klass, args)
      end

      # FIX: always return the freshly built instance. Previously this method
      # returned the result of record_constructor_args, so if that helper did
      # not return its argument, every role would silently be assigned the
      # helper's return value instead of the role object.
      def instantiate(klass, args)
        instance = klass.new(**args)
        VSM::Meta::Support.record_constructor_args(instance, args)
        instance
      end
    end

    # Entry point: VSM::DSL.define(:name) { identity ...; operations ... }
    def self.define(name, &blk)
      Builder.new(name).tap { |b| b.instance_eval(&blk) }.build
    end
  end
end
|
data/lib/vsm/dsl_mcp.rb
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
require_relative "dsl"
|
|
3
|
+
require_relative "mcp/client"
|
|
4
|
+
require_relative "mcp/remote_tool_capsule"
|
|
5
|
+
|
|
6
|
+
module VSM
  module DSL
    class Builder
      # Reopens the DSL's ChildrenBuilder (defined in dsl.rb) to add
      # MCP-backed child capsules.
      class ChildrenBuilder
        # Reflect tools from a remote MCP server and add them as tool capsules.
        # Options:
        #   include: Array<String> whitelist of tool names
        #   exclude: Array<String> blacklist of tool names
        #   prefix: String prefix for local names to avoid collisions
        #   env: Hash environment passed to the server process
        #   cwd: Working directory for spawning the process
        #
        # Example:
        #   mcp_server :smith, cmd: "smith-server --stdio", include: %w[search read], prefix: "smith_"
        #
        # NOTE(review): the `include:` parameter shadows Module#include inside
        # this method body; harmless here, but keep in mind when editing.
        # NOTE(review): the spawned client is shared by all registered capsules
        # and is never closed here — presumably the capsules own its lifetime;
        # confirm against VSM::MCP::Client.
        def mcp_server(name, cmd:, env: {}, include: nil, exclude: nil, prefix: nil, cwd: nil)
          # Start the server process immediately so its tool catalog can be reflected.
          client = VSM::MCP::Client.new(cmd: cmd, env: env, cwd: cwd, name: name.to_s).start
          tools = client.list_tools
          tools.each do |t|
            tool_name = t[:name]
            # Apply whitelist then blacklist by the remote tool name.
            next if include && !Array(include).include?(tool_name)
            next if exclude && Array(exclude).include?(tool_name)
            # Local registry key gets the optional prefix to avoid clashes with local tools.
            local_name = [prefix, tool_name].compact.join
            capsule = VSM::MCP::RemoteToolCapsule.new(client: client, remote_name: tool_name, descriptor: t)
            @children[local_name] = capsule
          end
        end
      end
    end
  end
end
|
|
36
|
+
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
module VSM
  module Executors
    # Executes a tool on its own OS thread so blocking work cannot stall the
    # caller, then blocks the caller until the result (or error) is available.
    module ThreadExecutor
      # Runs tool.run(args) on a fresh thread. Returns the tool's result, or
      # re-raises any StandardError the tool raised, in the calling thread.
      def self.call(tool, args)
        outcome = Queue.new
        Thread.new do
          outcome << [:ok, tool.run(args)]
        rescue StandardError => error
          # Ferry the exception back instead of letting the thread die noisily.
          outcome << [:err, error]
        end
        status, payload = outcome.pop
        raise(payload) if status == :err
        payload
      end
    end
  end
end
|
|
@@ -0,0 +1,154 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require 'erb'
|
|
4
|
+
require 'fileutils'
|
|
5
|
+
require 'pathname'
|
|
6
|
+
require_relative '../version'
|
|
7
|
+
|
|
8
|
+
module VSM
  module Generator
    # Scaffolds a new VSM-based gem project from the ERB templates shipped in
    # lib/vsm/generator/templates: directory tree, gemspec, executable, ports,
    # and a sample tool. Optionally runs `git init` and `bundle install`.
    class NewProject
      # Root of the bundled ERB templates (sibling "templates" directory).
      # NOTE(review): unconventional constant casing (TemplateRoot vs
      # TEMPLATE_ROOT); left as-is since it is publicly reachable.
      TemplateRoot = File.expand_path('templates', __dir__)

      # Convenience wrapper: build and run in one call.
      def self.run(name:, path: nil, git: false, bundle: false, provider: 'openai', model: nil, force: false)
        new(name: name, path: path, git: git, bundle: bundle, provider: provider, model: model, force: force).run
      end

      # name:     user-supplied project name (drives module/lib/exe naming)
      # path:     target directory (defaults to ./<name>)
      # git:      run git init + initial commit after generation
      # bundle:   run bundle install after generation
      # provider: LLM provider key used to pick a default model in templates
      # model:    explicit model name; overrides the provider default
      # force:    allow writing into an existing non-empty directory
      def initialize(name:, path:, git:, bundle:, provider:, model:, force:)
        @input_name = name
        @target_dir = File.expand_path(path || name)
        @git = git
        @bundle = bundle
        @provider = provider
        @model = model
        @force = force
      end

      # Generates the project: validates the target dir, creates the tree,
      # renders every template, runs post steps, and prints next steps.
      def run
        prepare_target_dir!

        # Create directory tree
        mkdirs(
          'exe',
          'bin',
          "lib/#{lib_name}",
          "lib/#{lib_name}/ports",
          "lib/#{lib_name}/tools"
        )

        # Render files
        write('README.md', render('README_md.erb'))
        write('.gitignore', render('gitignore.erb'))
        write('Gemfile', render('Gemfile.erb'))
        write('Rakefile', render('Rakefile.erb'))
        write("#{lib_name}.gemspec", render('gemspec.erb'))

        # Executables get the user-executable bit.
        write("exe/#{exe_name}", render('exe_name.erb'), mode: 0o755)
        write('bin/console', render('bin_console.erb'), mode: 0o755)
        write('bin/setup', render('bin_setup.erb'), mode: 0o755)

        write("lib/#{lib_name}.rb", render('lib_name_rb.erb'))
        write("lib/#{lib_name}/version.rb", render('lib_version_rb.erb'))
        write("lib/#{lib_name}/organism.rb", render('lib_organism_rb.erb'))
        write("lib/#{lib_name}/ports/chat_tty.rb", render('lib_ports_chat_tty_rb.erb'))
        write("lib/#{lib_name}/tools/read_file.rb", render('lib_tools_read_file_rb.erb'))

        post_steps

        puts <<~DONE

          Created #{module_name} in #{@target_dir}

          Next steps:
            cd #{relative_target}
            bundle install
            bundle exec exe/#{exe_name}

          Add tools in lib/#{lib_name}/tools and customize banner in lib/#{lib_name}/ports/chat_tty.rb.
        DONE
      end

      private

      # Creates each directory under the target, mkdir -p style.
      def mkdirs(*dirs)
        dirs.each { |d| FileUtils.mkdir_p(File.join(@target_dir, d)) }
      end

      # Writes +content+ to +rel+ under the target dir, creating parent
      # directories; applies the optional file mode (e.g. 0o755) afterwards.
      def write(rel, content, mode: nil)
        full = File.join(@target_dir, rel)
        FileUtils.mkdir_p(File.dirname(full))
        File.write(full, content)
        File.chmod(mode, full) if mode
      end

      # Renders a template against this instance's binding, so templates can
      # call the helper methods below (module_name, lib_name, ...).
      def render(template_name)
        template_path = File.join(TemplateRoot, template_name)
        erb = ERB.new(File.read(template_path), trim_mode: '-')
        erb.result(binding)
      end

      # Optional git init/commit and bundle install inside the new project.
      def post_steps
        Dir.chdir(@target_dir) do
          if @git
            system('git', 'init')
            system('git', 'add', '-A')
            system('git', 'commit', '-m', 'init')
          end
          if @bundle
            system('bundle', 'install')
          end
        end
      end

      # Refuses to write into an existing non-empty directory unless --force.
      # NOTE(review): Dir.children already omits "." and ".." — the
      # subtraction is redundant but harmless.
      def prepare_target_dir!
        if Dir.exist?(@target_dir)
          if !@force && !(Dir.children(@target_dir) - %w[. ..]).empty?
            raise "Target directory already exists and is not empty: #{@target_dir} (use --force to overwrite)"
          end
        else
          FileUtils.mkdir_p(@target_dir)
        end
      end

      # --- Template helpers (available via binding) ---

      # "my-agent_kit" -> "MyAgentKit"
      def module_name
        @module_name ||= @input_name.split(/[-_]/).map { |p| p.gsub(/[^a-zA-Z0-9]/, '').capitalize }.join
      end

      # "My-Agent" -> "my_agent" (ruby file/dir naming)
      def lib_name
        @lib_name ||= @input_name.downcase.gsub('-', '_')
      end

      # "my_agent" -> "my-agent" (executable naming)
      def exe_name
        @exe_name ||= @input_name.downcase.gsub('_', '-')
      end

      # "my-agent" -> "MY_AGENT" (environment-variable prefix)
      def env_prefix
        @env_prefix ||= @input_name.gsub('-', '_').upcase
      end

      # Pessimistic ("~> MAJOR.MINOR") constraint on the running vsm version.
      # NOTE(review): references Vsm::VERSION (capital-V "Vsm"), while the
      # rest of this codebase uses the VSM module — verify ../version defines
      # Vsm, otherwise this raises NameError at template-render time.
      def vsm_version_constraint
        parts = Vsm::VERSION.split('.')
        "~> #{parts[0]}.#{parts[1]}"
      end

      def provider
        (@provider || 'openai').downcase
      end

      # Provider-specific default model, used when no --model was given.
      def default_model
        return @model if @model && !@model.empty?
        case provider
        when 'anthropic' then 'claude-3-5-sonnet-latest'
        when 'gemini' then 'gemini-2.0-flash'
        else 'gpt-4o-mini'
        end
      end

      # Target dir relative to cwd for friendly output; falls back to the
      # absolute path when no relative path exists (e.g. different drive).
      def relative_target
        Pathname.new(@target_dir).relative_path_from(Pathname.new(Dir.pwd)).to_s rescue @target_dir
      end
    end
  end
end
|