vsm 0.0.1 → 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. checksums.yaml +4 -4
  2. data/.claude/settings.local.json +17 -0
  3. data/CLAUDE.md +134 -0
  4. data/README.md +531 -17
  5. data/examples/01_echo_tool.rb +70 -0
  6. data/examples/02_openai_streaming.rb +73 -0
  7. data/examples/02b_anthropic_streaming.rb +61 -0
  8. data/examples/02c_gemini_streaming.rb +60 -0
  9. data/examples/03_openai_tools.rb +106 -0
  10. data/examples/03b_anthropic_tools.rb +96 -0
  11. data/examples/03c_gemini_tools.rb +95 -0
  12. data/lib/vsm/async_channel.rb +21 -0
  13. data/lib/vsm/capsule.rb +44 -0
  14. data/lib/vsm/drivers/anthropic/async_driver.rb +210 -0
  15. data/lib/vsm/drivers/family.rb +16 -0
  16. data/lib/vsm/drivers/gemini/async_driver.rb +149 -0
  17. data/lib/vsm/drivers/openai/async_driver.rb +202 -0
  18. data/lib/vsm/dsl.rb +50 -0
  19. data/lib/vsm/executors/fiber_executor.rb +10 -0
  20. data/lib/vsm/executors/thread_executor.rb +19 -0
  21. data/lib/vsm/homeostat.rb +19 -0
  22. data/lib/vsm/lens/event_hub.rb +73 -0
  23. data/lib/vsm/lens/server.rb +188 -0
  24. data/lib/vsm/lens/stats.rb +58 -0
  25. data/lib/vsm/lens/tui.rb +88 -0
  26. data/lib/vsm/lens.rb +79 -0
  27. data/lib/vsm/message.rb +6 -0
  28. data/lib/vsm/observability/ledger.rb +25 -0
  29. data/lib/vsm/port.rb +11 -0
  30. data/lib/vsm/roles/coordination.rb +49 -0
  31. data/lib/vsm/roles/governance.rb +9 -0
  32. data/lib/vsm/roles/identity.rb +11 -0
  33. data/lib/vsm/roles/intelligence.rb +168 -0
  34. data/lib/vsm/roles/operations.rb +33 -0
  35. data/lib/vsm/runtime.rb +18 -0
  36. data/lib/vsm/tool/acts_as_tool.rb +20 -0
  37. data/lib/vsm/tool/capsule.rb +12 -0
  38. data/lib/vsm/tool/descriptor.rb +16 -0
  39. data/lib/vsm/version.rb +1 -1
  40. data/lib/vsm.rb +33 -0
  41. data/llms.txt +322 -0
  42. metadata +67 -25
data/examples/02_openai_streaming.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+# Example: OpenAI streaming demo (no tools)
+#
+# Usage:
+# OPENAI_API_KEY=... AIRB_MODEL=gpt-4o-mini ruby examples/02_openai_streaming.rb
+# VSM_DEBUG_STREAM=1 to see low-level logs
+
+$LOAD_PATH.unshift(File.expand_path("../lib", __dir__))
+require "securerandom"
+require "vsm"
+
+MODEL = ENV["AIRB_MODEL"] || "gpt-4o-mini"
+
+driver = VSM::Drivers::OpenAI::AsyncDriver.new(
+  api_key: ENV.fetch("OPENAI_API_KEY"),
+  model: MODEL
+)
+
+system_prompt = <<~PROMPT
+  You are a concise assistant. Answer briefly.
+PROMPT
+
+cap = VSM::DSL.define(:openai_stream_demo) do
+  identity klass: VSM::Identity, args: { identity: "openai_stream_demo", invariants: [] }
+  governance klass: VSM::Governance
+  coordination klass: VSM::Coordination
+  intelligence klass: VSM::Intelligence, args: { driver: driver, system_prompt: system_prompt }
+  operations klass: VSM::Operations
+  monitoring klass: VSM::Monitoring
+end
+
+if ENV["VSM_LENS"] == "1"
+  VSM::Lens.attach!(cap, port: (ENV["VSM_LENS_PORT"] || 9292).to_i, token: ENV["VSM_LENS_TOKEN"]) rescue nil
+end
+
+class StreamTTY < VSM::Port
+  def should_render?(message)
+    [:assistant_delta, :assistant, :tool_result, :tool_call].include?(message.kind)
+  end
+
+  def loop
+    sid = SecureRandom.uuid
+    puts "openai streaming demo — type to chat (Ctrl-C to exit)"
+    print "You: "
+    while (line = $stdin.gets&.chomp)
+      @capsule.bus.emit VSM::Message.new(kind: :user, payload: line, meta: { session_id: sid })
+      @capsule.roles[:coordination].wait_for_turn_end(sid)
+      print "You: "
+    end
+  end
+
+  def render_out(msg)
+    case msg.kind
+    when :assistant_delta
+      # Stream without newline
+      print msg.payload
+      $stdout.flush
+    when :assistant
+      puts "" # end the line
+      puts msg.payload.to_s unless msg.payload.to_s.empty?
+      puts "(turn #{msg.meta&.dig(:turn_id)})"
+    when :tool_result
+      puts "\nTool> #{msg.payload}"
+    when :tool_call
+      puts "\nTool? #{msg.payload[:tool]}(#{msg.corr_id}) #{msg.payload[:args].inspect}"
+    end
+  end
+end
+
+VSM::Runtime.start(cap, ports: [StreamTTY.new(capsule: cap)])
+
+
data/examples/02b_anthropic_streaming.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+# Example: Anthropic streaming demo (no tools)
+
+$LOAD_PATH.unshift(File.expand_path("../lib", __dir__))
+require "securerandom"
+require "vsm"
+
+MODEL = ENV["AIRB_MODEL"] || "claude-sonnet-4-0"
+
+driver = VSM::Drivers::Anthropic::AsyncDriver.new(
+  api_key: ENV.fetch("ANTHROPIC_API_KEY"),
+  model: MODEL,
+  streaming: true,
+  transport: :nethttp
+)
+
+system_prompt = "You are a concise assistant. Answer briefly."
+
+cap = VSM::DSL.define(:anthropic_stream_demo) do
+  identity klass: VSM::Identity, args: { identity: "anthropic_stream_demo", invariants: [] }
+  governance klass: VSM::Governance
+  coordination klass: VSM::Coordination
+  intelligence klass: VSM::Intelligence, args: { driver: driver, system_prompt: system_prompt }
+  operations klass: VSM::Operations
+  monitoring klass: VSM::Monitoring
+end
+
+class StreamTTY < VSM::Port
+  def should_render?(message)
+    [:assistant_delta, :assistant].include?(message.kind) || message.kind == :tool_calls
+  end
+
+  def loop
+    sid = SecureRandom.uuid
+    puts "anthropic streaming demo — type to chat (Ctrl-C to exit)"
+    print "You: "
+    while (line = $stdin.gets&.chomp)
+      @capsule.bus.emit VSM::Message.new(kind: :user, payload: line, meta: { session_id: sid })
+      @capsule.roles[:coordination].wait_for_turn_end(sid)
+      print "You: "
+    end
+  end
+
+  def render_out(msg)
+    case msg.kind
+    when :assistant_delta
+      print msg.payload
+      $stdout.flush
+    when :assistant
+      puts ""
+      puts "(turn #{msg.meta&.dig(:turn_id)})"
+    when :tool_calls
+      puts "\n(tool_calls #{msg.payload&.size || 0})"
+    end
+  end
+end
+
+VSM::Runtime.start(cap, ports: [StreamTTY.new(capsule: cap)])
+
+
data/examples/02c_gemini_streaming.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+# Example: Gemini streaming demo (no tools)
+
+$LOAD_PATH.unshift(File.expand_path("../lib", __dir__))
+require "securerandom"
+require "vsm"
+
+MODEL = ENV["AIRB_MODEL"] || "gemini-2.5-flash"
+
+driver = VSM::Drivers::Gemini::AsyncDriver.new(
+  api_key: ENV.fetch("GEMINI_API_KEY"),
+  model: MODEL,
+  streaming: true
+)
+
+system_prompt = "You are a concise assistant. Answer briefly."
+
+cap = VSM::DSL.define(:gemini_stream_demo) do
+  identity klass: VSM::Identity, args: { identity: "gemini_stream_demo", invariants: [] }
+  governance klass: VSM::Governance
+  coordination klass: VSM::Coordination
+  intelligence klass: VSM::Intelligence, args: { driver: driver, system_prompt: system_prompt }
+  operations klass: VSM::Operations
+  monitoring klass: VSM::Monitoring
+end
+
+class StreamTTY < VSM::Port
+  def should_render?(message)
+    [:assistant_delta, :assistant].include?(message.kind) || message.kind == :tool_calls
+  end
+
+  def loop
+    sid = SecureRandom.uuid
+    puts "gemini streaming demo — type to chat (Ctrl-C to exit)"
+    print "You: "
+    while (line = $stdin.gets&.chomp)
+      @capsule.bus.emit VSM::Message.new(kind: :user, payload: line, meta: { session_id: sid })
+      @capsule.roles[:coordination].wait_for_turn_end(sid)
+      print "You: "
+    end
+  end
+
+  def render_out(msg)
+    case msg.kind
+    when :assistant_delta
+      print msg.payload
+      $stdout.flush
+    when :assistant
+      puts ""
+      puts "(turn #{msg.meta&.dig(:turn_id)})"
+    when :tool_calls
+      puts "\n(tool_calls #{msg.payload&.size || 0})"
+    end
+  end
+end
+
+VSM::Runtime.start(cap, ports: [StreamTTY.new(capsule: cap)])
+
+
data/examples/03_openai_tools.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+# Example: OpenAI tool-calling demo (list_files/read_file)
+#
+# Usage:
+# OPENAI_API_KEY=... AIRB_MODEL=gpt-4o-mini ruby examples/03_openai_tools.rb
+# VSM_DEBUG_STREAM=1 to see low-level logs
+
+$LOAD_PATH.unshift(File.expand_path("../lib", __dir__))
+require "securerandom"
+require "json"
+require "vsm"
+
+MODEL = ENV["AIRB_MODEL"] || "gpt-4o-mini"
+
+# Simple file tools scoped to current working directory
+class ListFiles < VSM::ToolCapsule
+  tool_name "list_files"
+  tool_description "List files in a directory"
+  tool_schema({ type: "object", properties: { path: { type: "string" } }, required: [] })
+  def run(args)
+    path = args["path"].to_s.strip
+    path = "." if path.empty?
+    entries = Dir.children(path).sort.take(200)
+    entries.join("\n")
+  rescue => e
+    "ERROR: #{e.class}: #{e.message}"
+  end
+end
+
+class ReadFile < VSM::ToolCapsule
+  tool_name "read_file"
+  tool_description "Read a small text file"
+  tool_schema({ type: "object", properties: { path: { type: "string" } }, required: ["path"] })
+  def run(args)
+    path = args["path"].to_s
+    raise "path required" if path.empty?
+    raise "too large" if File.size(path) > 200_000
+    File.read(path)
+  rescue => e
+    "ERROR: #{e.class}: #{e.message}"
+  end
+end
+
+driver = VSM::Drivers::OpenAI::AsyncDriver.new(
+  api_key: ENV.fetch("OPENAI_API_KEY"),
+  model: MODEL
+)
+
+system_prompt = <<~PROMPT
+  You are a coding assistant with two tools: list_files and read_file.
+  Prefer to call tools when appropriate. Keep answers brief.
+PROMPT
+
+cap = VSM::DSL.define(:openai_tools_demo) do
+  identity klass: VSM::Identity, args: { identity: "openai_tools_demo", invariants: [] }
+  governance klass: VSM::Governance
+  coordination klass: VSM::Coordination
+  intelligence klass: VSM::Intelligence, args: { driver: driver, system_prompt: system_prompt }
+  monitoring klass: VSM::Monitoring
+  operations do
+    capsule :list_files, klass: ListFiles
+    capsule :read_file, klass: ReadFile
+  end
+end
+
+if ENV["VSM_LENS"] == "1"
+  VSM::Lens.attach!(cap, port: (ENV["VSM_LENS_PORT"] || 9292).to_i, token: ENV["VSM_LENS_TOKEN"]) rescue nil
+end
+
+class ToolTTY < VSM::Port
+  def should_render?(message)
+    [:assistant_delta, :assistant, :tool_result, :tool_call].include?(message.kind)
+  end
+
+  def loop
+    sid = SecureRandom.uuid
+    puts "openai tools demo — type to chat (Ctrl-C to exit)"
+    print "You: "
+    while (line = $stdin.gets&.chomp)
+      @capsule.bus.emit VSM::Message.new(kind: :user, payload: line, meta: { session_id: sid })
+      @capsule.roles[:coordination].wait_for_turn_end(sid)
+      print "You: "
+    end
+  end
+
+  def render_out(msg)
+    case msg.kind
+    when :assistant_delta
+      print msg.payload
+      $stdout.flush
+    when :assistant
+      puts ""
+      puts "(turn #{msg.meta&.dig(:turn_id)})"
+    when :tool_call
+      puts "\nTool? #{msg.payload[:tool]}(#{msg.corr_id})"
+    when :tool_result
+      puts "\nTool> (completed)"
+    end
+  end
+end
+
+VSM::Runtime.start(cap, ports: [ToolTTY.new(capsule: cap)])
+
+
+
data/examples/03b_anthropic_tools.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+# Example: Anthropic tool-calling demo (list_files/read_file)
+
+$LOAD_PATH.unshift(File.expand_path("../lib", __dir__))
+require "securerandom"
+require "vsm"
+
+MODEL = ENV["AIRB_MODEL"] || "claude-sonnet-4-0"
+
+class ListFiles < VSM::ToolCapsule
+  tool_name "list_files"
+  tool_description "List files in a directory"
+  tool_schema({ type: "object", properties: { path: { type: "string" } }, required: [] })
+  def run(args)
+    path = args["path"].to_s.strip
+    path = "." if path.empty?
+    Dir.children(path).sort.take(200).join("\n")
+  rescue => e
+    "ERROR: #{e.class}: #{e.message}"
+  end
+end
+
+class ReadFile < VSM::ToolCapsule
+  tool_name "read_file"
+  tool_description "Read a small text file"
+  tool_schema({ type: "object", properties: { path: { type: "string" } }, required: ["path"] })
+  def run(args)
+    path = args["path"].to_s
+    raise "path required" if path.empty?
+    raise "too large" if File.size(path) > 200_000
+    File.read(path)
+  rescue => e
+    "ERROR: #{e.class}: #{e.message}"
+  end
+end
+
+driver = VSM::Drivers::Anthropic::AsyncDriver.new(
+  api_key: ENV.fetch("ANTHROPIC_API_KEY"),
+  model: MODEL,
+  streaming: true,
+  transport: :nethttp
+)
+
+system_prompt = <<~PROMPT
+  You are a coding assistant with two tools: list_files and read_file.
+  Prefer to call tools when appropriate. Keep answers brief.
+PROMPT
+
+cap = VSM::DSL.define(:anthropic_tools_demo) do
+  identity klass: VSM::Identity, args: { identity: "anthropic_tools_demo", invariants: [] }
+  governance klass: VSM::Governance
+  coordination klass: VSM::Coordination
+  intelligence klass: VSM::Intelligence, args: { driver: driver, system_prompt: system_prompt }
+  monitoring klass: VSM::Monitoring
+  operations do
+    capsule :list_files, klass: ListFiles
+    capsule :read_file, klass: ReadFile
+  end
+end
+
+class ToolTTY < VSM::Port
+  def should_render?(message)
+    [:assistant_delta, :assistant, :tool_result, :tool_call].include?(message.kind)
+  end
+
+  def loop
+    sid = SecureRandom.uuid
+    puts "anthropic tools demo — type to chat (Ctrl-C to exit)"
+    print "You: "
+    while (line = $stdin.gets&.chomp)
+      @capsule.bus.emit VSM::Message.new(kind: :user, payload: line, meta: { session_id: sid })
+      @capsule.roles[:coordination].wait_for_turn_end(sid)
+      print "You: "
+    end
+  end
+
+  def render_out(msg)
+    case msg.kind
+    when :assistant_delta
+      print msg.payload
+      $stdout.flush
+    when :assistant
+      puts ""
+      puts "(turn #{msg.meta&.dig(:turn_id)})"
+    when :tool_call
+      puts "\nTool? #{msg.payload[:tool]}(#{msg.corr_id})"
+    when :tool_result
+      puts "\nTool> (completed)"
+    end
+  end
+end
+
+VSM::Runtime.start(cap, ports: [ToolTTY.new(capsule: cap)])
+
+
data/examples/03c_gemini_tools.rb
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+# Example: Gemini tool-calling demo (list_files/read_file) with streaming enabled
+
+$LOAD_PATH.unshift(File.expand_path("../lib", __dir__))
+require "securerandom"
+require "vsm"
+
+MODEL = ENV["AIRB_MODEL"] || "gemini-2.5-flash"
+
+class ListFiles < VSM::ToolCapsule
+  tool_name "list_files"
+  tool_description "List files in a directory"
+  tool_schema({ type: "object", properties: { path: { type: "string" } }, required: [] })
+  def run(args)
+    path = args["path"].to_s.strip
+    path = "." if path.empty?
+    Dir.children(path).sort.take(200).join("\n")
+  rescue => e
+    "ERROR: #{e.class}: #{e.message}"
+  end
+end
+
+class ReadFile < VSM::ToolCapsule
+  tool_name "read_file"
+  tool_description "Read a small text file"
+  tool_schema({ type: "object", properties: { path: { type: "string" } }, required: ["path"] })
+  def run(args)
+    path = args["path"].to_s
+    raise "path required" if path.empty?
+    raise "too large" if File.size(path) > 200_000
+    File.read(path)
+  rescue => e
+    "ERROR: #{e.class}: #{e.message}"
+  end
+end
+
+driver = VSM::Drivers::Gemini::AsyncDriver.new(
+  api_key: ENV.fetch("GEMINI_API_KEY"),
+  model: MODEL,
+  streaming: true
+)
+
+system_prompt = <<~PROMPT
+  You are a coding assistant with two tools: list_files and read_file.
+  Prefer to call tools when appropriate. Keep answers brief.
+PROMPT
+
+cap = VSM::DSL.define(:gemini_tools_demo) do
+  identity klass: VSM::Identity, args: { identity: "gemini_tools_demo", invariants: [] }
+  governance klass: VSM::Governance
+  coordination klass: VSM::Coordination
+  intelligence klass: VSM::Intelligence, args: { driver: driver, system_prompt: system_prompt }
+  monitoring klass: VSM::Monitoring
+  operations do
+    capsule :list_files, klass: ListFiles
+    capsule :read_file, klass: ReadFile
+  end
+end
+
+class ToolTTY < VSM::Port
+  def should_render?(message)
+    [:assistant_delta, :assistant, :tool_result, :tool_call].include?(message.kind)
+  end
+
+  def loop
+    sid = SecureRandom.uuid
+    puts "gemini tools demo — type to chat (Ctrl-C to exit)"
+    print "You: "
+    while (line = $stdin.gets&.chomp)
+      @capsule.bus.emit VSM::Message.new(kind: :user, payload: line, meta: { session_id: sid })
+      @capsule.roles[:coordination].wait_for_turn_end(sid)
+      print "You: "
+    end
+  end
+
+  def render_out(msg)
+    case msg.kind
+    when :assistant_delta
+      print msg.payload
+      $stdout.flush
+    when :assistant
+      puts ""
+      puts "(turn #{msg.meta&.dig(:turn_id)})"
+    when :tool_call
+      puts "\nTool? #{msg.payload[:tool]}(#{msg.corr_id})"
+    when :tool_result
+      puts "\nTool> (completed)"
+    end
+  end
+end
+
+VSM::Runtime.start(cap, ports: [ToolTTY.new(capsule: cap)])
+
+
data/lib/vsm/async_channel.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module VSM
+  class AsyncChannel
+    attr_reader :context
+
+    def initialize(context: {})
+      @queue = Async::Queue.new
+      @subs = []
+      @context = context
+    end
+
+    def emit(message)
+      @queue.enqueue(message)
+      @subs.each { |blk| Async { blk.call(message) } }
+    end
+
+    def pop = @queue.dequeue
+    def subscribe(&blk) = @subs << blk
+  end
+end
data/lib/vsm/capsule.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+require "async"
+module VSM
+  class Capsule
+    attr_reader :name, :bus, :homeostat, :roles, :children
+
+    def initialize(name:, roles:, children: {})
+      @name = name.to_sym
+      @roles = roles
+      @children = children
+      ctx = { operations_children: children.transform_keys(&:to_s) }
+      @bus = AsyncChannel.new(context: ctx)
+      @homeostat = Homeostat.new
+      wire_observers!
+    end
+
+    def run
+      Async do
+        loop do
+          message = @bus.pop
+          roles[:coordination].stage(message)
+          roles[:coordination].drain(@bus) { |m| dispatch(m) }
+        end
+      end
+    end
+
+    def dispatch(message)
+      return roles[:identity].alert(message) if homeostat.alarm?(message)
+      roles[:governance].enforce(message) { route(_1) }
+    end
+
+    def route(message)
+      roles[:operations].handle(message, bus: @bus, children: @children) ||
+        roles[:intelligence].handle(message, bus: @bus) ||
+        roles[:identity].handle(message, bus: @bus)
+    end
+
+    private
+
+    def wire_observers!
+      roles.values.each { |r| r.respond_to?(:observe) && r.observe(@bus) }
+    end
+  end
+end
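
For orientation, a minimal sketch (not part of the diff) of the AsyncChannel pub/sub surface shown above, the same bus a Capsule drains in run. It assumes the async gem is available and uses only calls visible in this release (AsyncChannel#emit/#pop/#subscribe and VSM::Message as used by the bundled examples):

require "async"
require "vsm"

Async do
  bus = VSM::AsyncChannel.new

  # subscribe: each emitted message is also delivered to subscribers in their own Async task
  bus.subscribe { |msg| puts "observed #{msg.kind}: #{msg.payload.inspect}" }

  # emit: enqueue a message, mirroring what the example ports do with @capsule.bus.emit
  bus.emit VSM::Message.new(kind: :user, payload: "hello", meta: { session_id: "demo" })

  # pop: dequeue the same message, as Capsule#run does in its loop
  puts "dequeued #{bus.pop.payload}"
end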