vsm 0.0.1 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.claude/settings.local.json +17 -0
- data/CLAUDE.md +134 -0
- data/README.md +675 -17
- data/Rakefile +1 -5
- data/examples/01_echo_tool.rb +51 -0
- data/examples/02_openai_streaming.rb +73 -0
- data/examples/02b_anthropic_streaming.rb +58 -0
- data/examples/02c_gemini_streaming.rb +60 -0
- data/examples/03_openai_tools.rb +106 -0
- data/examples/03b_anthropic_tools.rb +93 -0
- data/examples/03c_gemini_tools.rb +95 -0
- data/examples/05_mcp_server_and_chattty.rb +63 -0
- data/examples/06_mcp_mount_reflection.rb +45 -0
- data/examples/07_connect_claude_mcp.rb +78 -0
- data/examples/08_custom_chattty.rb +63 -0
- data/examples/09_mcp_with_llm_calls.rb +49 -0
- data/examples/10_meta_read_only.rb +56 -0
- data/exe/vsm +17 -0
- data/lib/vsm/async_channel.rb +44 -0
- data/lib/vsm/capsule.rb +46 -0
- data/lib/vsm/cli.rb +78 -0
- data/lib/vsm/drivers/anthropic/async_driver.rb +210 -0
- data/lib/vsm/drivers/family.rb +16 -0
- data/lib/vsm/drivers/gemini/async_driver.rb +149 -0
- data/lib/vsm/drivers/openai/async_driver.rb +202 -0
- data/lib/vsm/dsl.rb +80 -0
- data/lib/vsm/dsl_mcp.rb +36 -0
- data/lib/vsm/executors/fiber_executor.rb +10 -0
- data/lib/vsm/executors/thread_executor.rb +19 -0
- data/lib/vsm/generator/new_project.rb +154 -0
- data/lib/vsm/generator/templates/Gemfile.erb +9 -0
- data/lib/vsm/generator/templates/README_md.erb +40 -0
- data/lib/vsm/generator/templates/Rakefile.erb +5 -0
- data/lib/vsm/generator/templates/bin_console.erb +11 -0
- data/lib/vsm/generator/templates/bin_setup.erb +7 -0
- data/lib/vsm/generator/templates/exe_name.erb +34 -0
- data/lib/vsm/generator/templates/gemspec.erb +24 -0
- data/lib/vsm/generator/templates/gitignore.erb +10 -0
- data/lib/vsm/generator/templates/lib_name_rb.erb +9 -0
- data/lib/vsm/generator/templates/lib_organism_rb.erb +44 -0
- data/lib/vsm/generator/templates/lib_ports_chat_tty_rb.erb +12 -0
- data/lib/vsm/generator/templates/lib_tools_read_file_rb.erb +32 -0
- data/lib/vsm/generator/templates/lib_version_rb.erb +6 -0
- data/lib/vsm/homeostat.rb +19 -0
- data/lib/vsm/lens/event_hub.rb +73 -0
- data/lib/vsm/lens/server.rb +188 -0
- data/lib/vsm/lens/stats.rb +58 -0
- data/lib/vsm/lens/tui.rb +88 -0
- data/lib/vsm/lens.rb +79 -0
- data/lib/vsm/mcp/client.rb +80 -0
- data/lib/vsm/mcp/jsonrpc.rb +92 -0
- data/lib/vsm/mcp/remote_tool_capsule.rb +35 -0
- data/lib/vsm/message.rb +6 -0
- data/lib/vsm/meta/snapshot_builder.rb +121 -0
- data/lib/vsm/meta/snapshot_cache.rb +25 -0
- data/lib/vsm/meta/support.rb +35 -0
- data/lib/vsm/meta/tools.rb +498 -0
- data/lib/vsm/meta.rb +59 -0
- data/lib/vsm/observability/ledger.rb +25 -0
- data/lib/vsm/port.rb +11 -0
- data/lib/vsm/ports/chat_tty.rb +112 -0
- data/lib/vsm/ports/mcp/server_stdio.rb +101 -0
- data/lib/vsm/roles/coordination.rb +49 -0
- data/lib/vsm/roles/governance.rb +9 -0
- data/lib/vsm/roles/identity.rb +11 -0
- data/lib/vsm/roles/intelligence.rb +172 -0
- data/lib/vsm/roles/operations.rb +33 -0
- data/lib/vsm/runtime.rb +18 -0
- data/lib/vsm/tool/acts_as_tool.rb +20 -0
- data/lib/vsm/tool/capsule.rb +12 -0
- data/lib/vsm/tool/descriptor.rb +16 -0
- data/lib/vsm/version.rb +1 -1
- data/lib/vsm.rb +43 -0
- data/llms.txt +322 -0
- data/mcp_update.md +162 -0
- metadata +93 -31
- data/.rubocop.yml +0 -8
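The per-file diffs reproduced below are the bundled examples; together they exercise most of the new 0.2.0 surface: a capsule DSL (identity, governance, coordination, intelligence, operations, monitoring), async LLM drivers for OpenAI, Anthropic and Gemini, tool capsules, a ChatTTY port, MCP stdio server/client ports, and the optional Lens observer. As an orientation aid only, here is a condensed capsule assembled from those examples (not part of the package; the banner-less ChatTTY construction is an assumption, everything else appears verbatim in the diffs below):

    require "vsm"
    require "vsm/ports/chat_tty"

    # One tool capsule, in the style of examples/01 and /03 below.
    class Echo < VSM::ToolCapsule
      tool_name "echo"
      tool_description "Echoes a message"
      tool_schema({ type: "object", properties: { text: { type: "string" } }, required: ["text"] })
      def run(args)
        "you said: #{args["text"]}"
      end
    end

    cap = VSM::DSL.define(:demo) do
      identity klass: VSM::Identity, args: { identity: "demo", invariants: [] }
      governance klass: VSM::Governance
      coordination klass: VSM::Coordination
      intelligence klass: VSM::Intelligence, args: {
        driver: VSM::Drivers::OpenAI::AsyncDriver.new(api_key: ENV.fetch("OPENAI_API_KEY"), model: "gpt-4o-mini"),
        system_prompt: "You are a concise assistant."
      }
      monitoring klass: VSM::Monitoring
      operations do
        capsule :echo, klass: Echo
      end
    end

    VSM::Runtime.start(cap, ports: [VSM::Ports::ChatTTY.new(capsule: cap)])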
data/examples/01_echo_tool.rb ADDED
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+$LOAD_PATH.unshift(File.expand_path("../lib", __dir__))
+require "vsm"
+require "vsm/ports/chat_tty"
+require "securerandom"
+
+class EchoTool < VSM::ToolCapsule
+  tool_name "echo"
+  tool_description "Echoes a message"
+  tool_schema({ type: "object", properties: { text: { type: "string" } }, required: ["text"] })
+
+  def run(args)
+    "you said: #{args["text"]}"
+  end
+end
+
+# Minimal “intelligence” that triggers a tool when user types "echo: ..."
+class DemoIntelligence < VSM::Intelligence
+  def handle(message, bus:, **)
+    case message.kind
+    when :user
+      if message.payload =~ /\Aecho:\s*(.+)\z/
+        bus.emit VSM::Message.new(kind: :tool_call, payload: { tool: "echo", args: { "text" => $1 } }, corr_id: SecureRandom.uuid, meta: message.meta)
+      else
+        bus.emit VSM::Message.new(kind: :assistant, payload: "Try: echo: hello", meta: message.meta)
+      end
+      true
+    when :tool_result
+      # Complete the turn after tool execution
+      bus.emit VSM::Message.new(kind: :assistant, payload: "(done)", meta: message.meta)
+      true
+    else
+      false
+    end
+  end
+end
+
+cap = VSM::DSL.define(:demo) do
+  identity klass: VSM::Identity, args: { identity: "demo", invariants: [] }
+  governance klass: VSM::Governance
+  coordination klass: VSM::Coordination
+  intelligence klass: DemoIntelligence
+  monitoring klass: VSM::Monitoring
+  operations do
+    capsule :echo, klass: EchoTool
+  end
+end
+
+# Use the built-in, customizable ChatTTY port
+banner = ->(io) { io.puts "\e[96mEcho demo\e[0m — type 'echo: hello' (Ctrl-C to exit)" }
+VSM::Runtime.start(cap, ports: [VSM::Ports::ChatTTY.new(capsule: cap, banner: banner)])
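A note on the example above, not part of the diff: because EchoTool#run is plain Ruby, the tool logic can be exercised without starting the runtime, which is convenient for unit tests. A sketch, assuming the vsm 0.2.0 gem is installed and that VSM::ToolCapsule can be instantiated with no arguments (an assumption; the DSL above only ever passes klass:):

    require "vsm"

    class EchoTool < VSM::ToolCapsule
      tool_name "echo"
      tool_description "Echoes a message"
      tool_schema({ type: "object", properties: { text: { type: "string" } }, required: ["text"] })
      def run(args)
        "you said: #{args["text"]}"
      end
    end

    # Call the tool body directly, bypassing the bus and ports.
    puts EchoTool.new.run({ "text" => "hello" })   # => you said: hello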
data/examples/02_openai_streaming.rb ADDED
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+# Example: OpenAI streaming demo (no tools)
+#
+# Usage:
+# OPENAI_API_KEY=... AIRB_MODEL=gpt-4o-mini ruby examples/02_openai_streaming.rb
+# VSM_DEBUG_STREAM=1 to see low-level logs
+
+$LOAD_PATH.unshift(File.expand_path("../lib", __dir__))
+require "securerandom"
+require "vsm"
+
+MODEL = ENV["AIRB_MODEL"] || "gpt-4o-mini"
+
+driver = VSM::Drivers::OpenAI::AsyncDriver.new(
+  api_key: ENV.fetch("OPENAI_API_KEY"),
+  model: MODEL
+)
+
+system_prompt = <<~PROMPT
+  You are a concise assistant. Answer briefly.
+PROMPT
+
+cap = VSM::DSL.define(:openai_stream_demo) do
+  identity klass: VSM::Identity, args: { identity: "openai_stream_demo", invariants: [] }
+  governance klass: VSM::Governance
+  coordination klass: VSM::Coordination
+  intelligence klass: VSM::Intelligence, args: { driver: driver, system_prompt: system_prompt }
+  operations klass: VSM::Operations
+  monitoring klass: VSM::Monitoring
+end
+
+if ENV["VSM_LENS"] == "1"
+  VSM::Lens.attach!(cap, port: (ENV["VSM_LENS_PORT"] || 9292).to_i, token: ENV["VSM_LENS_TOKEN"]) rescue nil
+end
+
+class StreamTTY < VSM::Port
+  def should_render?(message)
+    [:assistant_delta, :assistant, :tool_result, :tool_call].include?(message.kind)
+  end
+
+  def loop
+    sid = SecureRandom.uuid
+    puts "openai streaming demo — type to chat (Ctrl-C to exit)"
+    print "You: "
+    while (line = $stdin.gets&.chomp)
+      @capsule.bus.emit VSM::Message.new(kind: :user, payload: line, meta: { session_id: sid })
+      @capsule.roles[:coordination].wait_for_turn_end(sid)
+      print "You: "
+    end
+  end
+
+  def render_out(msg)
+    case msg.kind
+    when :assistant_delta
+      # Stream without newline
+      print msg.payload
+      $stdout.flush
+    when :assistant
+      puts "" # end the line after streaming
+      # The :assistant event carries the full final text again; avoid re-printing it
+      # because we've already streamed the deltas above. Just show the turn marker.
+      puts "(turn #{msg.meta&.dig(:turn_id)})"
+    when :tool_result
+      puts "\nTool> #{msg.payload}"
+    when :tool_call
+      puts "\nTool? #{msg.payload[:tool]}(#{msg.corr_id}) #{msg.payload[:args].inspect}"
+    end
+  end
+end
+
+VSM::Runtime.start(cap, ports: [StreamTTY.new(capsule: cap)])
+
data/examples/02b_anthropic_streaming.rb ADDED
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+# Example: Anthropic streaming demo (no tools)
+
+$LOAD_PATH.unshift(File.expand_path("../lib", __dir__))
+require "securerandom"
+require "vsm"
+
+MODEL = ENV["AIRB_MODEL"] || "claude-sonnet-4-0"
+
+driver = VSM::Drivers::Anthropic::AsyncDriver.new(
+  api_key: ENV.fetch("ANTHROPIC_API_KEY"),
+  model: MODEL
+)
+
+system_prompt = "You are a concise assistant. Answer briefly."
+
+cap = VSM::DSL.define(:anthropic_stream_demo) do
+  identity klass: VSM::Identity, args: { identity: "anthropic_stream_demo", invariants: [] }
+  governance klass: VSM::Governance
+  coordination klass: VSM::Coordination
+  intelligence klass: VSM::Intelligence, args: { driver: driver, system_prompt: system_prompt }
+  operations klass: VSM::Operations
+  monitoring klass: VSM::Monitoring
+end
+
+class StreamTTY < VSM::Port
+  def should_render?(message)
+    [:assistant_delta, :assistant].include?(message.kind) || message.kind == :tool_calls
+  end
+
+  def loop
+    sid = SecureRandom.uuid
+    puts "anthropic streaming demo — type to chat (Ctrl-C to exit)"
+    print "You: "
+    while (line = $stdin.gets&.chomp)
+      @capsule.bus.emit VSM::Message.new(kind: :user, payload: line, meta: { session_id: sid })
+      @capsule.roles[:coordination].wait_for_turn_end(sid)
+      print "You: "
+    end
+  end
+
+  def render_out(msg)
+    case msg.kind
+    when :assistant_delta
+      print msg.payload
+      $stdout.flush
+    when :assistant
+      puts ""
+      puts "(turn #{msg.meta&.dig(:turn_id)})"
+    when :tool_calls
+      puts "\n(tool_calls #{msg.payload&.size || 0})"
+    end
+  end
+end
+
+VSM::Runtime.start(cap, ports: [StreamTTY.new(capsule: cap)])
+
data/examples/02c_gemini_streaming.rb ADDED
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+# Example: Gemini streaming demo (no tools)
+
+$LOAD_PATH.unshift(File.expand_path("../lib", __dir__))
+require "securerandom"
+require "vsm"
+
+MODEL = ENV["AIRB_MODEL"] || "gemini-2.5-flash"
+
+driver = VSM::Drivers::Gemini::AsyncDriver.new(
+  api_key: ENV.fetch("GEMINI_API_KEY"),
+  model: MODEL,
+  streaming: true
+)
+
+system_prompt = "You are a concise assistant. Answer briefly."
+
+cap = VSM::DSL.define(:gemini_stream_demo) do
+  identity klass: VSM::Identity, args: { identity: "gemini_stream_demo", invariants: [] }
+  governance klass: VSM::Governance
+  coordination klass: VSM::Coordination
+  intelligence klass: VSM::Intelligence, args: { driver: driver, system_prompt: system_prompt }
+  operations klass: VSM::Operations
+  monitoring klass: VSM::Monitoring
+end
+
+class StreamTTY < VSM::Port
+  def should_render?(message)
+    [:assistant_delta, :assistant].include?(message.kind) || message.kind == :tool_calls
+  end
+
+  def loop
+    sid = SecureRandom.uuid
+    puts "gemini streaming demo — type to chat (Ctrl-C to exit)"
+    print "You: "
+    while (line = $stdin.gets&.chomp)
+      @capsule.bus.emit VSM::Message.new(kind: :user, payload: line, meta: { session_id: sid })
+      @capsule.roles[:coordination].wait_for_turn_end(sid)
+      print "You: "
+    end
+  end
+
+  def render_out(msg)
+    case msg.kind
+    when :assistant_delta
+      print msg.payload
+      $stdout.flush
+    when :assistant
+      puts ""
+      puts "(turn #{msg.meta&.dig(:turn_id)})"
+    when :tool_calls
+      puts "\n(tool_calls #{msg.payload&.size || 0})"
+    end
+  end
+end
+
+VSM::Runtime.start(cap, ports: [StreamTTY.new(capsule: cap)])
+
+
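The three streaming examples above differ only in the driver constructor; everything from the DSL block to the StreamTTY port is identical. A sketch of selecting the provider at runtime, using only the constructors shown in this diff (the VSM_PROVIDER variable is invented for illustration and is not read by the gem):

    require "vsm"

    driver =
      case ENV["VSM_PROVIDER"] # hypothetical switch, not part of vsm itself
      when "anthropic"
        VSM::Drivers::Anthropic::AsyncDriver.new(api_key: ENV.fetch("ANTHROPIC_API_KEY"), model: ENV["AIRB_MODEL"] || "claude-sonnet-4-0")
      when "gemini"
        VSM::Drivers::Gemini::AsyncDriver.new(api_key: ENV.fetch("GEMINI_API_KEY"), model: ENV["AIRB_MODEL"] || "gemini-2.5-flash", streaming: true)
      else
        VSM::Drivers::OpenAI::AsyncDriver.new(api_key: ENV.fetch("OPENAI_API_KEY"), model: ENV["AIRB_MODEL"] || "gpt-4o-mini")
      end

    # The driver then plugs into the same slot in every example:
    #   intelligence klass: VSM::Intelligence, args: { driver: driver, system_prompt: system_prompt }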
data/examples/03_openai_tools.rb ADDED
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+# Example: OpenAI tool-calling demo (list_files/read_file)
+#
+# Usage:
+# OPENAI_API_KEY=... AIRB_MODEL=gpt-4o-mini ruby examples/03_openai_tools.rb
+# VSM_DEBUG_STREAM=1 to see low-level logs
+
+$LOAD_PATH.unshift(File.expand_path("../lib", __dir__))
+require "securerandom"
+require "json"
+require "vsm"
+
+MODEL = ENV["AIRB_MODEL"] || "gpt-4o-mini"
+
+# Simple file tools scoped to current working directory
+class ListFiles < VSM::ToolCapsule
+  tool_name "list_files"
+  tool_description "List files in a directory"
+  tool_schema({ type: "object", properties: { path: { type: "string" } }, required: [] })
+  def run(args)
+    path = args["path"].to_s.strip
+    path = "." if path.empty?
+    entries = Dir.children(path).sort.take(200)
+    entries.join("\n")
+  rescue => e
+    "ERROR: #{e.class}: #{e.message}"
+  end
+end
+
+class ReadFile < VSM::ToolCapsule
+  tool_name "read_file"
+  tool_description "Read a small text file"
+  tool_schema({ type: "object", properties: { path: { type: "string" } }, required: ["path"] })
+  def run(args)
+    path = args["path"].to_s
+    raise "path required" if path.empty?
+    raise "too large" if File.size(path) > 200_000
+    File.read(path)
+  rescue => e
+    "ERROR: #{e.class}: #{e.message}"
+  end
+end
+
+driver = VSM::Drivers::OpenAI::AsyncDriver.new(
+  api_key: ENV.fetch("OPENAI_API_KEY"),
+  model: MODEL
+)
+
+system_prompt = <<~PROMPT
+  You are a coding assistant with two tools: list_files and read_file.
+  Prefer to call tools when appropriate. Keep answers brief.
+PROMPT
+
+cap = VSM::DSL.define(:openai_tools_demo) do
+  identity klass: VSM::Identity, args: { identity: "openai_tools_demo", invariants: [] }
+  governance klass: VSM::Governance
+  coordination klass: VSM::Coordination
+  intelligence klass: VSM::Intelligence, args: { driver: driver, system_prompt: system_prompt }
+  monitoring klass: VSM::Monitoring
+  operations do
+    capsule :list_files, klass: ListFiles
+    capsule :read_file, klass: ReadFile
+  end
+end
+
+if ENV["VSM_LENS"] == "1"
+  VSM::Lens.attach!(cap, port: (ENV["VSM_LENS_PORT"] || 9292).to_i, token: ENV["VSM_LENS_TOKEN"]) rescue nil
+end
+
+class ToolTTY < VSM::Port
+  def should_render?(message)
+    [:assistant_delta, :assistant, :tool_result, :tool_call].include?(message.kind)
+  end
+
+  def loop
+    sid = SecureRandom.uuid
+    puts "openai tools demo — type to chat (Ctrl-C to exit)"
+    print "You: "
+    while (line = $stdin.gets&.chomp)
+      @capsule.bus.emit VSM::Message.new(kind: :user, payload: line, meta: { session_id: sid })
+      @capsule.roles[:coordination].wait_for_turn_end(sid)
+      print "You: "
+    end
+  end
+
+  def render_out(msg)
+    case msg.kind
+    when :assistant_delta
+      print msg.payload
+      $stdout.flush
+    when :assistant
+      puts ""
+      puts "(turn #{msg.meta&.dig(:turn_id)})"
+    when :tool_call
+      puts "\nTool? #{msg.payload[:tool]}(#{msg.corr_id})"
+    when :tool_result
+      puts "\nTool> (completed)"
+    end
+  end
+end
+
+VSM::Runtime.start(cap, ports: [ToolTTY.new(capsule: cap)])
+
+
+
data/examples/03b_anthropic_tools.rb ADDED
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+# Example: Anthropic tool-calling demo (list_files/read_file)
+
+$LOAD_PATH.unshift(File.expand_path("../lib", __dir__))
+require "securerandom"
+require "vsm"
+
+MODEL = ENV["AIRB_MODEL"] || "claude-sonnet-4-0"
+
+class ListFiles < VSM::ToolCapsule
+  tool_name "list_files"
+  tool_description "List files in a directory"
+  tool_schema({ type: "object", properties: { path: { type: "string" } }, required: [] })
+  def run(args)
+    path = args["path"].to_s.strip
+    path = "." if path.empty?
+    Dir.children(path).sort.take(200).join("\n")
+  rescue => e
+    "ERROR: #{e.class}: #{e.message}"
+  end
+end
+
+class ReadFile < VSM::ToolCapsule
+  tool_name "read_file"
+  tool_description "Read a small text file"
+  tool_schema({ type: "object", properties: { path: { type: "string" } }, required: ["path"] })
+  def run(args)
+    path = args["path"].to_s
+    raise "path required" if path.empty?
+    raise "too large" if File.size(path) > 200_000
+    File.read(path)
+  rescue => e
+    "ERROR: #{e.class}: #{e.message}"
+  end
+end
+
+driver = VSM::Drivers::Anthropic::AsyncDriver.new(
+  api_key: ENV.fetch("ANTHROPIC_API_KEY"),
+  model: MODEL
+)
+
+system_prompt = <<~PROMPT
+  You are a coding assistant with two tools: list_files and read_file.
+  Prefer to call tools when appropriate. Keep answers brief.
+PROMPT
+
+cap = VSM::DSL.define(:anthropic_tools_demo) do
+  identity klass: VSM::Identity, args: { identity: "anthropic_tools_demo", invariants: [] }
+  governance klass: VSM::Governance
+  coordination klass: VSM::Coordination
+  intelligence klass: VSM::Intelligence, args: { driver: driver, system_prompt: system_prompt }
+  monitoring klass: VSM::Monitoring
+  operations do
+    capsule :list_files, klass: ListFiles
+    capsule :read_file, klass: ReadFile
+  end
+end
+
+class ToolTTY < VSM::Port
+  def should_render?(message)
+    [:assistant_delta, :assistant, :tool_result, :tool_call].include?(message.kind)
+  end
+
+  def loop
+    sid = SecureRandom.uuid
+    puts "anthropic tools demo — type to chat (Ctrl-C to exit)"
+    print "You: "
+    while (line = $stdin.gets&.chomp)
+      @capsule.bus.emit VSM::Message.new(kind: :user, payload: line, meta: { session_id: sid })
+      @capsule.roles[:coordination].wait_for_turn_end(sid)
+      print "You: "
+    end
+  end
+
+  def render_out(msg)
+    case msg.kind
+    when :assistant_delta
+      print msg.payload
+      $stdout.flush
+    when :assistant
+      puts ""
+      puts "(turn #{msg.meta&.dig(:turn_id)})"
+    when :tool_call
+      puts "\nTool? #{msg.payload[:tool]}(#{msg.corr_id})"
+    when :tool_result
+      puts "\nTool> (completed)"
+    end
+  end
+end
+
+VSM::Runtime.start(cap, ports: [ToolTTY.new(capsule: cap)])
+
data/examples/03c_gemini_tools.rb ADDED
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+# Example: Gemini tool-calling demo (list_files/read_file) with streaming enabled
+
+$LOAD_PATH.unshift(File.expand_path("../lib", __dir__))
+require "securerandom"
+require "vsm"
+
+MODEL = ENV["AIRB_MODEL"] || "gemini-2.5-flash"
+
+class ListFiles < VSM::ToolCapsule
+  tool_name "list_files"
+  tool_description "List files in a directory"
+  tool_schema({ type: "object", properties: { path: { type: "string" } }, required: [] })
+  def run(args)
+    path = args["path"].to_s.strip
+    path = "." if path.empty?
+    Dir.children(path).sort.take(200).join("\n")
+  rescue => e
+    "ERROR: #{e.class}: #{e.message}"
+  end
+end
+
+class ReadFile < VSM::ToolCapsule
+  tool_name "read_file"
+  tool_description "Read a small text file"
+  tool_schema({ type: "object", properties: { path: { type: "string" } }, required: ["path"] })
+  def run(args)
+    path = args["path"].to_s
+    raise "path required" if path.empty?
+    raise "too large" if File.size(path) > 200_000
+    File.read(path)
+  rescue => e
+    "ERROR: #{e.class}: #{e.message}"
+  end
+end
+
+driver = VSM::Drivers::Gemini::AsyncDriver.new(
+  api_key: ENV.fetch("GEMINI_API_KEY"),
+  model: MODEL,
+  streaming: true
+)
+
+system_prompt = <<~PROMPT
+  You are a coding assistant with two tools: list_files and read_file.
+  Prefer to call tools when appropriate. Keep answers brief.
+PROMPT
+
+cap = VSM::DSL.define(:gemini_tools_demo) do
+  identity klass: VSM::Identity, args: { identity: "gemini_tools_demo", invariants: [] }
+  governance klass: VSM::Governance
+  coordination klass: VSM::Coordination
+  intelligence klass: VSM::Intelligence, args: { driver: driver, system_prompt: system_prompt }
+  monitoring klass: VSM::Monitoring
+  operations do
+    capsule :list_files, klass: ListFiles
+    capsule :read_file, klass: ReadFile
+  end
+end
+
+class ToolTTY < VSM::Port
+  def should_render?(message)
+    [:assistant_delta, :assistant, :tool_result, :tool_call].include?(message.kind)
+  end
+
+  def loop
+    sid = SecureRandom.uuid
+    puts "gemini tools demo — type to chat (Ctrl-C to exit)"
+    print "You: "
+    while (line = $stdin.gets&.chomp)
+      @capsule.bus.emit VSM::Message.new(kind: :user, payload: line, meta: { session_id: sid })
+      @capsule.roles[:coordination].wait_for_turn_end(sid)
+      print "You: "
+    end
+  end
+
+  def render_out(msg)
+    case msg.kind
+    when :assistant_delta
+      print msg.payload
+      $stdout.flush
+    when :assistant
+      puts ""
+      puts "(turn #{msg.meta&.dig(:turn_id)})"
+    when :tool_call
+      puts "\nTool? #{msg.payload[:tool]}(#{msg.corr_id})"
+    when :tool_result
+      puts "\nTool> (completed)"
+    end
+  end
+end
+
+VSM::Runtime.start(cap, ports: [ToolTTY.new(capsule: cap)])
+
+
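The three tool-calling demos above repeat the ListFiles and ReadFile capsules verbatim; only the driver changes. Extending any of them is a matter of one more ToolCapsule plus one more line in the operations block. A hedged sketch (WordCount is invented for illustration; the registration pattern is exactly the one used above):

    # An extra tool capsule in the same style as ListFiles/ReadFile.
    class WordCount < VSM::ToolCapsule
      tool_name "word_count"
      tool_description "Count words in a small text file"
      tool_schema({ type: "object", properties: { path: { type: "string" } }, required: ["path"] })
      def run(args)
        path = args["path"].to_s
        raise "path required" if path.empty?
        raise "too large" if File.size(path) > 200_000
        File.read(path).split.size.to_s
      rescue => e
        "ERROR: #{e.class}: #{e.message}"
      end
    end

    # ...and, inside VSM::DSL.define(...) do ... end:
    #   operations do
    #     capsule :list_files, klass: ListFiles
    #     capsule :read_file,  klass: ReadFile
    #     capsule :word_count, klass: WordCount
    #   end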
data/examples/05_mcp_server_and_chattty.rb ADDED
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+$LOAD_PATH.unshift(File.expand_path("../lib", __dir__))
+require "vsm"
+require "securerandom"
+require "vsm/ports/chat_tty"
+require "vsm/ports/mcp/server_stdio"
+
+# A simple local tool we can expose to both ChatTTY and MCP stdio.
+class EchoTool < VSM::ToolCapsule
+  tool_name "echo"
+  tool_description "Echoes back the provided text"
+  tool_schema({ type: "object", properties: { text: { type: "string" } }, required: ["text"] })
+  def run(args)
+    "you said: #{args["text"]}"
+  end
+end
+
+# Minimal intelligence that triggers the echo tool when user types: echo: ...
+class DemoIntelligence < VSM::Intelligence
+  def handle(message, bus:, **)
+    case message.kind
+    when :user
+      if message.payload =~ /\Aecho:\s*(.+)\z/
+        bus.emit VSM::Message.new(kind: :tool_call, payload: { tool: "echo", args: { "text" => $1 } }, corr_id: SecureRandom.uuid, meta: message.meta)
+      else
+        bus.emit VSM::Message.new(kind: :assistant, payload: "Try: echo: hello", meta: message.meta)
+      end
+      true
+    when :tool_result
+      bus.emit VSM::Message.new(kind: :assistant, payload: "(done)", meta: message.meta)
+      true
+    else
+      false
+    end
+  end
+end
+
+cap = VSM::DSL.define(:demo_mcp_server_and_chat) do
+  identity klass: VSM::Identity, args: { identity: "demo", invariants: [] }
+  governance klass: VSM::Governance
+  coordination klass: VSM::Coordination
+  intelligence klass: DemoIntelligence
+  monitoring klass: VSM::Monitoring
+  operations do
+    capsule :echo, klass: EchoTool
+  end
+end
+
+# Run both ports together: MCP stdio (machine) + ChatTTY (human).
+banner = ->(io) { io.puts "\e[96mVSM demo\e[0m — type 'echo: hi' (Ctrl-C to exit)" }
+ports = [VSM::Ports::MCP::ServerStdio.new(capsule: cap)]
+if $stdout.tty?
+  # Only enable interactive ChatTTY when attached to a TTY to avoid
+  # interfering when this example is spawned as a background MCP server.
+  begin
+    tty = File.open("/dev/tty", "r+")
+  rescue StandardError
+    tty = nil
+  end
+  ports << VSM::Ports::ChatTTY.new(capsule: cap, banner: banner, input: tty, output: tty)
+end
+
+VSM::Runtime.start(cap, ports: ports)
data/examples/06_mcp_mount_reflection.rb ADDED
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+$LOAD_PATH.unshift(File.expand_path("../lib", __dir__))
+require "vsm"
+require "vsm/dsl_mcp"
+require "vsm/ports/chat_tty"
+require "securerandom"
+
+# This example mounts a remote MCP server (we use example 05 as the server)
+# and exposes its tools locally via dynamic reflection. Type: echo: hello
+
+class DemoIntelligence < VSM::Intelligence
+  def handle(message, bus:, **)
+    case message.kind
+    when :user
+      if message.payload =~ /\Aecho:\s*(.+)\z/
+        bus.emit VSM::Message.new(kind: :tool_call, payload: { tool: "echo", args: { "text" => $1 } }, corr_id: SecureRandom.uuid, meta: message.meta)
+      else
+        bus.emit VSM::Message.new(kind: :assistant, payload: "Try: echo: hello", meta: message.meta)
+      end
+      true
+    when :tool_result
+      bus.emit VSM::Message.new(kind: :assistant, payload: "(done)", meta: message.meta)
+      true
+    else
+      false
+    end
+  end
+end
+
+server_cmd = "ruby #{File.expand_path("05_mcp_server_and_chattty.rb", __dir__)}"
+
+cap = VSM::DSL.define(:mcp_mount_demo) do
+  identity klass: VSM::Identity, args: { identity: "mcp_mount_demo", invariants: [] }
+  governance klass: VSM::Governance
+  coordination klass: VSM::Coordination
+  intelligence klass: DemoIntelligence
+  monitoring klass: VSM::Monitoring
+  operations do
+    # Reflect the remote server's tools; include only :echo and expose as local name "echo"
+    mcp_server :demo_server, cmd: server_cmd, include: %w[echo]
+  end
+end
+
+banner = ->(io) { io.puts "\e[96mMCP mount demo\e[0m — type 'echo: hi' (Ctrl-C to exit)" }
+VSM::Runtime.start(cap, ports: [VSM::Ports::ChatTTY.new(capsule: cap, banner: banner)])
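The mcp_server line above spawns example 05 as a child process and reflects its tools into this capsule's operations, filtered by include:. In principle the same mount should work for any stdio MCP server; here is a sketch of the operations block with a second mount (the command and tool name are placeholders, and mounting more than one server in a single operations block is an assumption this diff does not demonstrate):

    # Inside VSM::DSL.define(:mcp_mount_demo) do ... end
    operations do
      # the demo echo server from the example above
      mcp_server :demo_server, cmd: server_cmd, include: %w[echo]
      # hypothetical second server; substitute a real stdio MCP server command and its tool names
      mcp_server :files, cmd: "my-mcp-file-server --stdio", include: %w[read_file]
    end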