vsm 0.0.1 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (78)
  1. checksums.yaml +4 -4
  2. data/.claude/settings.local.json +17 -0
  3. data/CLAUDE.md +134 -0
  4. data/README.md +675 -17
  5. data/Rakefile +1 -5
  6. data/examples/01_echo_tool.rb +51 -0
  7. data/examples/02_openai_streaming.rb +73 -0
  8. data/examples/02b_anthropic_streaming.rb +58 -0
  9. data/examples/02c_gemini_streaming.rb +60 -0
  10. data/examples/03_openai_tools.rb +106 -0
  11. data/examples/03b_anthropic_tools.rb +93 -0
  12. data/examples/03c_gemini_tools.rb +95 -0
  13. data/examples/05_mcp_server_and_chattty.rb +63 -0
  14. data/examples/06_mcp_mount_reflection.rb +45 -0
  15. data/examples/07_connect_claude_mcp.rb +78 -0
  16. data/examples/08_custom_chattty.rb +63 -0
  17. data/examples/09_mcp_with_llm_calls.rb +49 -0
  18. data/examples/10_meta_read_only.rb +56 -0
  19. data/exe/vsm +17 -0
  20. data/lib/vsm/async_channel.rb +44 -0
  21. data/lib/vsm/capsule.rb +46 -0
  22. data/lib/vsm/cli.rb +78 -0
  23. data/lib/vsm/drivers/anthropic/async_driver.rb +210 -0
  24. data/lib/vsm/drivers/family.rb +16 -0
  25. data/lib/vsm/drivers/gemini/async_driver.rb +149 -0
  26. data/lib/vsm/drivers/openai/async_driver.rb +202 -0
  27. data/lib/vsm/dsl.rb +80 -0
  28. data/lib/vsm/dsl_mcp.rb +36 -0
  29. data/lib/vsm/executors/fiber_executor.rb +10 -0
  30. data/lib/vsm/executors/thread_executor.rb +19 -0
  31. data/lib/vsm/generator/new_project.rb +154 -0
  32. data/lib/vsm/generator/templates/Gemfile.erb +9 -0
  33. data/lib/vsm/generator/templates/README_md.erb +40 -0
  34. data/lib/vsm/generator/templates/Rakefile.erb +5 -0
  35. data/lib/vsm/generator/templates/bin_console.erb +11 -0
  36. data/lib/vsm/generator/templates/bin_setup.erb +7 -0
  37. data/lib/vsm/generator/templates/exe_name.erb +34 -0
  38. data/lib/vsm/generator/templates/gemspec.erb +24 -0
  39. data/lib/vsm/generator/templates/gitignore.erb +10 -0
  40. data/lib/vsm/generator/templates/lib_name_rb.erb +9 -0
  41. data/lib/vsm/generator/templates/lib_organism_rb.erb +44 -0
  42. data/lib/vsm/generator/templates/lib_ports_chat_tty_rb.erb +12 -0
  43. data/lib/vsm/generator/templates/lib_tools_read_file_rb.erb +32 -0
  44. data/lib/vsm/generator/templates/lib_version_rb.erb +6 -0
  45. data/lib/vsm/homeostat.rb +19 -0
  46. data/lib/vsm/lens/event_hub.rb +73 -0
  47. data/lib/vsm/lens/server.rb +188 -0
  48. data/lib/vsm/lens/stats.rb +58 -0
  49. data/lib/vsm/lens/tui.rb +88 -0
  50. data/lib/vsm/lens.rb +79 -0
  51. data/lib/vsm/mcp/client.rb +80 -0
  52. data/lib/vsm/mcp/jsonrpc.rb +92 -0
  53. data/lib/vsm/mcp/remote_tool_capsule.rb +35 -0
  54. data/lib/vsm/message.rb +6 -0
  55. data/lib/vsm/meta/snapshot_builder.rb +121 -0
  56. data/lib/vsm/meta/snapshot_cache.rb +25 -0
  57. data/lib/vsm/meta/support.rb +35 -0
  58. data/lib/vsm/meta/tools.rb +498 -0
  59. data/lib/vsm/meta.rb +59 -0
  60. data/lib/vsm/observability/ledger.rb +25 -0
  61. data/lib/vsm/port.rb +11 -0
  62. data/lib/vsm/ports/chat_tty.rb +112 -0
  63. data/lib/vsm/ports/mcp/server_stdio.rb +101 -0
  64. data/lib/vsm/roles/coordination.rb +49 -0
  65. data/lib/vsm/roles/governance.rb +9 -0
  66. data/lib/vsm/roles/identity.rb +11 -0
  67. data/lib/vsm/roles/intelligence.rb +172 -0
  68. data/lib/vsm/roles/operations.rb +33 -0
  69. data/lib/vsm/runtime.rb +18 -0
  70. data/lib/vsm/tool/acts_as_tool.rb +20 -0
  71. data/lib/vsm/tool/capsule.rb +12 -0
  72. data/lib/vsm/tool/descriptor.rb +16 -0
  73. data/lib/vsm/version.rb +1 -1
  74. data/lib/vsm.rb +43 -0
  75. data/llms.txt +322 -0
  76. data/mcp_update.md +162 -0
  77. metadata +93 -31
  78. data/.rubocop.yml +0 -8
data/examples/07_connect_claude_mcp.rb ADDED
@@ -0,0 +1,78 @@
+ # frozen_string_literal: true
+ $LOAD_PATH.unshift(File.expand_path("../lib", __dir__))
+ require "json"
+ require "securerandom"
+ require "vsm"
+ require "vsm/dsl_mcp"
+ require "vsm/ports/chat_tty"
+
+ # Example: Connect to an external MCP server (Claude Code)
+ #
+ # Prereqs:
+ # - Install Claude CLI and log in.
+ # - Ensure `claude mcp serve` works in your shell.
+ #
+ # IMPORTANT: Many MCP servers (including Claude) use LSP-style Content-Length
+ # framing over stdio. The minimal transport in this repo currently uses NDJSON
+ # (one JSON per line). If this example hangs or fails, it's due to a framing
+ # mismatch; swap the transport to LSP framing in lib/vsm/mcp/jsonrpc.rb.
+ #
+ # Usage:
+ #   ruby examples/07_connect_claude_mcp.rb
+ # Then type:
+ #   list
+ #   call: some_tool {"arg1":"value"}
+ #
+ # This example avoids requiring any LLM API keys by letting you call tools manually
+ # via a simple chat convention.
+
+ # Intelligence that recognizes two commands:
+ # - "list" → prints available tools
+ # - "call: NAME {json}" → invokes the reflected tool with JSON args
+ class ManualMCPIntelligence < VSM::Intelligence
+   def handle(message, bus:, **)
+     return false unless message.kind == :user
+     line = message.payload.to_s.strip
+     if line == "list"
+       # Inspect operations children for tool descriptors
+       ops = bus.context[:operations_children] || {}
+       tools = ops.values.select { _1.respond_to?(:tool_descriptor) }.map { _1.tool_descriptor.name }
+       bus.emit VSM::Message.new(kind: :assistant, payload: tools.any? ? "tools: #{tools.join(", ")}" : "(no tools)", meta: message.meta)
+       return true
+     elsif line.start_with?("call:")
+       if line =~ /\Acall:\s*(\S+)\s*(\{.*\})?\z/
+         tool = $1
+         json = $2
+         args = json ? (JSON.parse(json) rescue {}) : {}
+         bus.emit VSM::Message.new(kind: :tool_call, payload: { tool: tool, args: args }, corr_id: SecureRandom.uuid, meta: message.meta)
+         return true
+       else
+         bus.emit VSM::Message.new(kind: :assistant, payload: "usage: call: NAME {json}", meta: message.meta)
+         return true
+       end
+     else
+       bus.emit VSM::Message.new(kind: :assistant, payload: "Commands: list | call: NAME {json}", meta: message.meta)
+       return true
+     end
+   end
+ end
+
+ cap = VSM::DSL.define(:claude_mcp_client) do
+   identity klass: VSM::Identity, args: { identity: "claude_mcp_client", invariants: [] }
+   governance klass: VSM::Governance
+   coordination klass: VSM::Coordination
+   intelligence klass: ManualMCPIntelligence
+   monitoring klass: VSM::Monitoring
+   operations do
+     # Reflect all available tools from the external server.
+     # Tip: if tool names collide with locals, use prefix: "claude_".
+     mcp_server :claude, cmd: ["claude", "mcp", "serve"]
+   end
+ end
+
+ banner = ->(io) do
+   io.puts "\e[96mMCP client (Claude)\e[0m"
+   io.puts "Type 'list' or 'call: NAME {json}'"
+ end
+
+ VSM::Runtime.start(cap, ports: [VSM::Ports::ChatTTY.new(capsule: cap, banner: banner, prompt: "You> ")])
data/examples/08_custom_chattty.rb ADDED
@@ -0,0 +1,63 @@
+ # frozen_string_literal: true
+ $LOAD_PATH.unshift(File.expand_path("../lib", __dir__))
+ require "vsm"
+ require "vsm/ports/chat_tty"
+ require "securerandom"
+
+ # Demonstrates subclassing ChatTTY to customize the banner and output formatting.
+
+ class EchoTool < VSM::ToolCapsule
+   tool_name "echo"
+   tool_description "Echoes back the provided text"
+   tool_schema({ type: "object", properties: { text: { type: "string" } }, required: ["text"] })
+   def run(args)
+     "you said: #{args["text"]}"
+   end
+ end
+
+ class DemoIntelligence < VSM::Intelligence
+   def handle(message, bus:, **)
+     return false unless message.kind == :user
+     if message.payload =~ /\Aecho:\s*(.+)\z/
+       bus.emit VSM::Message.new(kind: :tool_call, payload: { tool: "echo", args: { "text" => $1 } }, corr_id: SecureRandom.uuid, meta: message.meta)
+     else
+       bus.emit VSM::Message.new(kind: :assistant, payload: "Try: echo: hello", meta: message.meta)
+     end
+     true
+   end
+ end
+
+ class FancyTTY < VSM::Ports::ChatTTY
+   def banner(io)
+     io.puts "\e[95m\n ███ CUSTOM CHAT ███\n\e[0m"
+   end
+
+   def render_out(m)
+     case m.kind
+     when :assistant_delta
+       @streaming = true
+       @out.print m.payload
+       @out.flush
+     when :assistant
+       @out.puts unless @streaming
+       @streaming = false
+     when :tool_call
+       @out.puts "\n\e[90m→ calling #{m.payload[:tool]}\e[0m"
+     when :tool_result
+       @out.puts "\e[92m✓ #{m.payload}\e[0m"
+     end
+   end
+ end
+
+ cap = VSM::DSL.define(:fancy_chat) do
+   identity klass: VSM::Identity, args: { identity: "fancy_chat", invariants: [] }
+   governance klass: VSM::Governance
+   coordination klass: VSM::Coordination
+   intelligence klass: DemoIntelligence
+   monitoring klass: VSM::Monitoring
+   operations do
+     capsule :echo, klass: EchoTool
+   end
+ end
+
+ VSM::Runtime.start(cap, ports: [FancyTTY.new(capsule: cap, prompt: "Me: ")])
data/examples/09_mcp_with_llm_calls.rb ADDED
@@ -0,0 +1,49 @@
+ # frozen_string_literal: true
+ $LOAD_PATH.unshift(File.expand_path("../lib", __dir__))
+ require "vsm"
+ require "vsm/dsl_mcp"
+ require "vsm/ports/chat_tty"
+
+ # Example: Use an LLM driver (OpenAI) to automatically call tools exposed by an MCP server.
+ #
+ # Prereqs:
+ # - OPENAI_API_KEY must be set
+ # - An MCP server available on your PATH, e.g. `claude mcp serve`
+ #
+ # Usage:
+ #   OPENAI_API_KEY=... AIRB_MODEL=gpt-4o-mini ruby examples/09_mcp_with_llm_calls.rb
+ #   Type a question; the model will choose tools from the reflected MCP server.
+
+ MODEL = ENV["AIRB_MODEL"] || "gpt-4o-mini"
+
+ driver = VSM::Drivers::OpenAI::AsyncDriver.new(
+   api_key: ENV.fetch("OPENAI_API_KEY"),
+   model: MODEL
+ )
+
+ system_prompt = <<~PROMPT
+   You are a helpful assistant. You have access to the listed tools.
+   When a tool can help, call it with appropriate JSON arguments.
+   Keep final answers concise.
+ PROMPT
+
+ cap = VSM::DSL.define(:mcp_with_llm) do
+   identity klass: VSM::Identity, args: { identity: "mcp_with_llm", invariants: [] }
+   governance klass: VSM::Governance
+   coordination klass: VSM::Coordination
+   intelligence klass: VSM::Intelligence, args: { driver: driver, system_prompt: system_prompt }
+   monitoring klass: VSM::Monitoring
+   operations do
+     # Reflect tools from an external MCP server (e.g., Claude Code).
+     # If your server requires strict LSP framing, run with VSM_MCP_LSP=1.
+     # You can also prefix names to avoid collisions: prefix: "claude_"
+     mcp_server :claude, cmd: ["claude", "mcp", "serve"]
+   end
+ end
+
+ banner = ->(io) do
+   io.puts "\e[96mLLM + MCP tools\e[0m — Ask a question; model may call tools."
+ end
+
+ VSM::Runtime.start(cap, ports: [VSM::Ports::ChatTTY.new(capsule: cap, banner: banner, prompt: "You> ")])
+
data/examples/10_meta_read_only.rb ADDED
@@ -0,0 +1,56 @@
+ # frozen_string_literal: true
+
+ # Demo: use OpenAI tool-calling to let an LLM inspect the running capsule via
+ # the read-only meta tools. Set OPENAI_API_KEY (and optionally AIRB_MODEL) then:
+ #   bundle exec ruby examples/10_meta_read_only.rb
+ # Ask things like "What can you do?" or "Explain meta_demo_tool" and the model
+ # will call the meta tools to gather context before replying.
+
+ $LOAD_PATH.unshift(File.expand_path("../lib", __dir__))
+
+ require "securerandom"
+ require "vsm"
+
+ MODEL = ENV["AIRB_MODEL"] || "gpt-4o-mini"
+ API_KEY = ENV["OPENAI_API_KEY"] or abort "OPENAI_API_KEY required for this demo"
+
+ class MetaDemoTool < VSM::ToolCapsule
+   tool_name "meta_demo_tool"
+   tool_description "Simple tool included alongside meta tools"
+   tool_schema({ type: "object", properties: {}, additionalProperties: false })
+
+   def run(_args)
+     "hello from demo tool"
+   end
+ end
+
+ driver = VSM::Drivers::OpenAI::AsyncDriver.new(api_key: API_KEY, model: MODEL)
+
+ SYSTEM_PROMPT = <<~PROMPT
+   You are the steward of a VSM capsule. You have access to built-in reflection
+   tools that describe the organism and its operations:
+   - meta_summarize_self: overview of the current capsule and its roles
+   - meta_list_tools: list available tools with schemas
+   - meta_explain_tool: show implementation details for a named tool
+   - meta_explain_role: show capsule-specific details and code for a VSM role
+   When the user asks about capabilities, available tools, or how something
+   works, call the appropriate meta_* tool first, then respond with a clear,
+   human-friendly summary that cites relevant tool names. Be concise but
+   complete.
+ PROMPT
+
+ cap = VSM::DSL.define(:meta_demo_llm) do
+   identity klass: VSM::Identity, args: { identity: "meta_demo_llm", invariants: [] }
+   governance klass: VSM::Governance, args: {}
+   coordination klass: VSM::Coordination, args: {}
+   intelligence klass: VSM::Intelligence, args: { driver: driver, system_prompt: SYSTEM_PROMPT }
+   monitoring klass: VSM::Monitoring, args: {}
+   operations do
+     meta_tools
+     capsule :meta_demo_tool, klass: MetaDemoTool
+   end
+ end
+
+ ports = [VSM::Ports::ChatTTY.new(capsule: cap, banner: ->(io) { io.puts "Meta demo ready. Try asking 'What can you do?'" })]
+
+ VSM::Runtime.start(cap, ports: ports)
data/exe/vsm ADDED
@@ -0,0 +1,17 @@
+ #!/usr/bin/env ruby
+ # frozen_string_literal: true
+
+ # Keep CLI independent of any project's Bundler context so we resolve this
+ # gem's dependencies rather than a host app's Gemfile.
+ ENV.delete('BUNDLE_GEMFILE')
+ ENV.delete('BUNDLE_BIN_PATH')
+ if (rubyopt = ENV['RUBYOPT'])
+   ENV['RUBYOPT'] = rubyopt.split.reject { |x| x.include?('bundler/setup') }.join(' ')
+ end
+ ENV.delete('RUBYGEMS_GEMDEPS')
+
+ require 'vsm'
+ require 'vsm/cli'
+
+ VSM::CLI.start(ARGV)
+
data/lib/vsm/async_channel.rb ADDED
@@ -0,0 +1,44 @@
+ # frozen_string_literal: true
+
+ module VSM
+   class AsyncChannel
+     attr_reader :context
+
+     def initialize(context: {})
+       @queue = Async::Queue.new
+       @subs = []
+       @context = context
+     end
+
+     def emit(message)
+       begin
+         @queue.enqueue(message)
+       rescue StandardError
+         # If no async scheduler is available in this thread, best-effort enqueue later.
+       end
+       @subs.each do |blk|
+         begin
+           Async { blk.call(message) }
+         rescue StandardError
+           # Fallback when no Async task is active in this thread
+           begin
+             blk.call(message)
+           rescue StandardError
+             # ignore subscriber errors
+           end
+         end
+       end
+     end
+
+     def pop = @queue.dequeue
+
+     def subscribe(&blk)
+       @subs << blk
+       blk
+     end
+
+     def unsubscribe(subscriber)
+       @subs.delete(subscriber)
+     end
+   end
+ end
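
A minimal usage sketch for AsyncChannel (illustrative, not part of the package): emit both enqueues the message for the capsule's main loop and fans it out to subscribers, while pop blocks on the queue, so both sides run inside an Async reactor.

    require "async"
    require "vsm"

    Async do
      bus = VSM::AsyncChannel.new(context: { demo: true })

      # Subscribers see every emitted message (this is how observers hook in).
      sub = bus.subscribe { |m| puts "observed: #{m.kind}" }

      bus.emit VSM::Message.new(kind: :user, payload: "hello", meta: {})

      # The main-loop side: blocks until a message is available.
      puts bus.pop.payload # => "hello"

      bus.unsubscribe(sub)
    end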
data/lib/vsm/capsule.rb ADDED
@@ -0,0 +1,46 @@
+ # frozen_string_literal: true
+ require "async"
+ module VSM
+   class Capsule
+     attr_reader :name, :bus, :homeostat, :roles, :children
+
+     def initialize(name:, roles:, children: {})
+       @name = name.to_sym
+       @roles = roles
+       @children = children
+       ctx = { operations_children: children.transform_keys(&:to_s) }
+       @bus = AsyncChannel.new(context: ctx)
+       @homeostat = Homeostat.new
+       # Inject bus into children that accept it, to enable richer observability
+       @children.each_value { |c| c.bus = @bus if c.respond_to?(:bus=) }
+       wire_observers!
+     end
+
+     def run
+       Async do
+         loop do
+           message = @bus.pop
+           roles[:coordination].stage(message)
+           roles[:coordination].drain(@bus) { |m| dispatch(m) }
+         end
+       end
+     end
+
+     def dispatch(message)
+       return roles[:identity].alert(message) if homeostat.alarm?(message)
+       roles[:governance].enforce(message) { route(_1) }
+     end
+
+     def route(message)
+       roles[:operations].handle(message, bus: @bus, children: @children) ||
+         roles[:intelligence].handle(message, bus: @bus) ||
+         roles[:identity].handle(message, bus: @bus)
+     end
+
+     private
+
+     def wire_observers!
+       roles.values.each { |r| r.respond_to?(:observe) && r.observe(@bus) }
+     end
+   end
+ end
data/lib/vsm/cli.rb ADDED
@@ -0,0 +1,78 @@
+ # frozen_string_literal: true
+
+ require 'optparse'
+ require_relative 'generator/new_project'
+
+ module VSM
+   class CLI
+     def self.start(argv = ARGV)
+       new.run(argv)
+     end
+
+     def run(argv)
+       cmd = argv.shift
+       case cmd
+       when 'new'
+         run_new(argv)
+       when nil, '-h', '--help', 'help'
+         puts help_text
+       else
+         warn "Unknown command: #{cmd}\n"
+         puts help_text
+         exit 1
+       end
+     end
+
+     private
+
+     def run_new(argv)
+       opts = {
+         path: nil,
+         git: false,
+         bundle: false,
+         provider: 'openai',
+         model: nil,
+         force: false
+       }
+       parser = OptionParser.new do |o|
+         o.banner = "Usage: vsm new <name> [options]"
+         o.on('--path PATH', 'Target directory (default: ./<name>)') { |v| opts[:path] = v }
+         o.on('--git', 'Run git init and initial commit') { opts[:git] = true }
+         o.on('--bundle', 'Run bundle install after generation') { opts[:bundle] = true }
+         o.on('--with-llm PROVIDER', %w[openai anthropic gemini], 'LLM provider: openai (default), anthropic, or gemini') { |v| opts[:provider] = v }
+         o.on('--model NAME', 'Default model name') { |v| opts[:model] = v }
+         o.on('--force', 'Overwrite existing directory') { opts[:force] = true }
+         o.on('-h', '--help', 'Show help') { puts o; exit 0 }
+       end
+
+       name = nil
+       begin
+         parser.order!(argv)
+         name = argv.shift
+       rescue OptionParser::ParseError => e
+         warn e.message
+         puts parser
+         exit 1
+       end
+
+       unless name && !name.strip.empty?
+         warn 'Please provide a project name, e.g., vsm new my_app'
+         puts parser
+         exit 1
+       end
+
+       VSM::Generator::NewProject.run(name: name, **opts)
+     end
+
+     def help_text
+       <<~TXT
+         VSM CLI
+
+         Commands:
+           vsm new <name> [options]   Create a new VSM app skeleton
+
+         Run `vsm new --help` for options.
+       TXT
+     end
+   end
+ end
data/lib/vsm/drivers/anthropic/async_driver.rb ADDED
@@ -0,0 +1,210 @@
+ # frozen_string_literal: true
+ require "json"
+ require "net/http"
+ require "uri"
+ require "securerandom"
+
+ module VSM
+   module Drivers
+     module Anthropic
+       class AsyncDriver
+         def initialize(api_key:, model:, base_url: "https://api.anthropic.com/v1", version: "2023-06-01")
+           @api_key, @model, @base, @version = api_key, model, base_url, version
+         end
+
+         def run!(conversation:, tools:, policy: {}, &emit)
+           # Always use Net::HTTP with SSE
+           emitted_terminal = false
+
+           headers = {
+             "x-api-key" => @api_key,
+             "anthropic-version" => @version,
+             "content-type" => "application/json",
+             "accept" => "text/event-stream"
+           }
+
+           messages = to_anthropic_messages(conversation, policy[:system_prompt])
+           tool_list = normalize_anthropic_tools(tools)
+           payload = {
+             model: @model,
+             system: policy[:system_prompt],
+             messages: messages,
+             max_tokens: 512,
+             stream: true
+           }
+           if tool_list.any?
+             payload[:tools] = tool_list
+             payload[:tool_choice] = { type: "auto" }
+           end
+           body = JSON.dump(payload)
+
+           url = URI.parse("#{@base}/messages")
+           http = Net::HTTP.new(url.host, url.port)
+           http.use_ssl = (url.scheme == "https")
+           http.read_timeout = 120
+
+           req = Net::HTTP::Post.new(url.request_uri)
+           headers.each { |k,v| req[k] = v }
+           req.body = body
+
+           res = http.request(req) do |response|
+             ct = response["content-type"]
+             if response.code.to_i != 200
+               err_body = +""
+               response.read_body { |chunk| err_body << chunk }
+               preview = err_body.to_s.byteslice(0, 400)
+               emit.call(:assistant_final, "Anthropic HTTP #{response.code}: #{preview}")
+               emitted_terminal = true
+               next
+             end
+
+             if ct && ct.include?("text/event-stream")
+               buffer = +""
+               textbuf = +""
+               toolbuf = {}
+               tool_calls = []
+
+               response.read_body do |chunk|
+                 buffer << chunk
+                 while (i = buffer.index("\n"))
+                   line = buffer.slice!(0..i)
+                   line.chomp!
+                   next unless line.start_with?("data:")
+                   data = line.sub("data:","").strip
+                   next if data.empty? || data == "[DONE]"
+                   obj = JSON.parse(data) rescue nil
+                   next unless obj
+                   ev = obj["type"].to_s
+                   if ENV["VSM_DEBUG_STREAM"] == "1"
+                     $stderr.puts "anthropic(nethttp) <= #{ev}: #{data.byteslice(0, 160)}"
+                   end
+
+                   case ev
+                   when "content_block_delta"
+                     idx = obj["index"]; delta = obj["delta"] || {}
+                     case delta["type"]
+                     when "text_delta"
+                       part = delta["text"].to_s
+                       textbuf << part
+                       emit.call(:assistant_delta, part)
+                     when "input_json_delta"
+                       toolbuf[idx] ||= { id: nil, name: nil, json: +"" }
+                       toolbuf[idx][:json] << (delta["partial_json"] || "")
+                     end
+                   when "content_block_start"
+                     # For Anthropic, the key can be 'content' or 'content_block'
+                     c = obj["content"] || obj["content_block"] || {}
+                     if c["type"] == "tool_use"
+                       name = c["name"] || obj["name"]
+                       toolbuf[obj["index"]] = { id: c["id"], name: name, json: +"" }
+                     end
+                   when "content_block_stop"
+                     idx = obj["index"]
+                     if tb = toolbuf[idx]
+                       args = tb[:json].empty? ? {} : (JSON.parse(tb[:json]) rescue {"_raw"=>tb[:json]})
+                       # Only enqueue if name is present
+                       if tb[:name].to_s.strip != "" && tb[:id]
+                         tool_calls << { id: tb[:id], name: tb[:name], arguments: args }
+                       end
+                     end
+                   when "message_stop"
+                     if tool_calls.any?
+                       emit.call(:tool_calls, tool_calls)
+                     else
+                       emit.call(:assistant_final, textbuf.dup)
+                     end
+                     emitted_terminal = true
+                   end
+                 end
+               end
+
+               unless emitted_terminal
+                 # If the stream closed without a terminal, emit final text
+                 emit.call(:assistant_final, textbuf)
+                 emitted_terminal = true
+               end
+             else
+               # Non-streaming JSON
+               data = +""
+               response.read_body { |chunk| data << chunk }
+               obj = JSON.parse(data) rescue {}
+               parts = Array(obj.dig("content"))
+               calls = []
+               text = +""
+               parts.each do |p|
+                 case p["type"]
+                 when "text" then text << p["text"].to_s
+                 when "tool_use" then calls << { id: p["id"] || SecureRandom.uuid, name: p["name"], arguments: p["input"] || {} }
+                 end
+               end
+               if calls.any?
+                 emit.call(:tool_calls, calls)
+               else
+                 emit.call(:assistant_final, text)
+               end
+               emitted_terminal = true
+             end
+           end
+
+           :done
+         end
+
+         private
+         # (no IPv6/IPv4 forcing; rely on default Internet)
+         def normalize_anthropic_tools(tools)
+           Array(tools).map { |t| normalize_anthropic_tool(t) }
+         end
+
+         def normalize_anthropic_tool(t)
+           return t.to_anthropic_tool if t.respond_to?(:to_anthropic_tool)
+
+           # Provider-shaped: {name:, description:, input_schema: {…}}
+           if t.is_a?(Hash) && (t[:input_schema] || t["input_schema"])
+             return t
+           end
+
+           # Neutral hash {name:, description:, schema:}
+           if t.is_a?(Hash) && (t[:name] || t["name"])
+             return {
+               name: t[:name] || t["name"],
+               description: t[:description] || t["description"] || "",
+               input_schema: t[:schema] || t["schema"] || {}
+             }
+           end
+
+           raise TypeError, "unsupported tool descriptor: #{t.inspect}"
+         end
+
+
+         def to_anthropic_messages(neutral, _system)
+           # Build content blocks per message; keep ordering
+           neutral.map do |m|
+             case m[:role]
+             when "user"
+               { role: "user", content: [{ type: "text", text: m[:content].to_s }] }
+             when "assistant"
+               { role: "assistant", content: [{ type: "text", text: m[:content].to_s }] }
+             when "assistant_tool_calls"
+               blocks = Array(m[:tool_calls]).map { |c|
+                 { type: "tool_use", id: c[:id], name: c[:name], input: c[:arguments] || {} }
+               }
+               { role: "assistant", content: blocks }
+             when "tool_result"
+               { role: "user", content: [{ type: "tool_result", tool_use_id: m[:tool_call_id], content: m[:content].to_s }] }
+             end
+           end.compact
+         end
+
+         def extract_sse_line!(buffer)
+           if (i = buffer.index("\n"))
+             line = buffer.slice!(0..i)
+             line.chomp!
+             return line
+           end
+           nil
+         end
+       end
+     end
+   end
+ end
+
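
A sketch of the emit protocol this driver implements: the block receives zero or more :assistant_delta chunks, then exactly one terminal event, either :assistant_final or :tool_calls (the model name below is a placeholder):

    driver = VSM::Drivers::Anthropic::AsyncDriver.new(
      api_key: ENV.fetch("ANTHROPIC_API_KEY"),
      model: "claude-3-5-haiku-latest" # placeholder; any Messages API model
    )

    conversation = [{ role: "user", content: "Say hello" }]

    driver.run!(conversation: conversation, tools: [], policy: { system_prompt: "Be brief." }) do |event, payload|
      case event
      when :assistant_delta then print payload             # streamed text chunk
      when :assistant_final then puts "\nfinal: #{payload}"
      when :tool_calls      then p payload                 # [{ id:, name:, arguments: }, ...]
      end
    end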
data/lib/vsm/drivers/family.rb ADDED
@@ -0,0 +1,16 @@
+ # frozen_string_literal: true
+ module VSM
+   module Drivers
+     module Family
+       def self.of(driver)
+         case driver
+         when VSM::Drivers::OpenAI::AsyncDriver then :openai
+         when VSM::Drivers::Anthropic::AsyncDriver then :anthropic
+         when VSM::Drivers::Gemini::AsyncDriver then :gemini
+         else :openai
+         end
+       end
+     end
+   end
+ end
+
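
Family.of lets callers branch on a provider tag instead of sprinkling is_a? checks; a sketch (credentials and output strings are illustrative):

    driver = VSM::Drivers::Anthropic::AsyncDriver.new(api_key: "sk-placeholder", model: "placeholder-model")

    case VSM::Drivers::Family.of(driver)
    when :openai    then puts "OpenAI-style tool schema"
    when :anthropic then puts "Anthropic-style tool schema (input_schema)"
    when :gemini    then puts "Gemini-style function declarations"
    end
    # => prints "Anthropic-style tool schema (input_schema)"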