langgraph_rb 0.1.4 → 0.1.5

This diff compares the contents of the two publicly released package versions as they appear in their respective registries. It is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: f5649cc9cef30c96380dc2ccd9a5c576af8d3efd7e02552b03d81cb45360f408
-  data.tar.gz: 3c706c127ac2fbd5e9d8a0be723575228a21d9ff91672169dc66174cd8dba3c5
+  metadata.gz: faa20cb53b8c5f9c1ea8d740d34ea63eadfe9e07a49683dd3c090f7bb042d3fd
+  data.tar.gz: 56bfc5f8fb39473af9252caf1573f1126e30405f37ee7ac45bfde6704df20575
 SHA512:
-  metadata.gz: 7dd32ea2d6c98ae356596cee6ca0d697802e7107649faf63c598606f9b708a68cd7c72e72d4ad5423d136a72d74c242108c28b670f6ab2aea2cf3a18925f35d4
-  data.tar.gz: d6a30010935f797cc6ebbe3767db38c8726029956192b57cd69304d6b0238d3dd0bdb1b32d7d9b348bbc8ee4469f8b1fe62b750386834c9cb7ce52980f1f25a4
+  metadata.gz: 0f24fbb2f677bb13e7a8091807ff6d3f3d835b295ee691420f30b576bbd74d05eae9040d6511319eaefbc6c04170dc501101f3ff07f5a3e08c066a3d398adc3f
+  data.tar.gz: 91bdd7cf66e2c73fb5103d2fe469381bce46e889cacd6710a76c012d96296d3f0625dbc3b4e992570ef67ba3c506f8031058b3e6384304d0596e2c6f10dd8a6b
data/Gemfile CHANGED
@@ -2,9 +2,12 @@ source 'https://rubygems.org'
 
 gemspec
 
+gem 'openai', '~> 0.24.0'
+
 group :development, :test do
   gem 'rspec', '~> 3.0'
   gem 'pry', '~> 0.14'
+  gem 'pry-byebug'
   gem 'rubocop', '~> 1.0'
   gem 'langfuse', '~> 0.1'
 end
data/Gemfile.lock CHANGED
@@ -1,15 +1,18 @@
 PATH
   remote: .
   specs:
-    langgraph_rb (0.1.2)
+    langgraph_rb (0.1.4)
       json (~> 2.0)
+      openai (~> 0.24.0)
 
 GEM
   remote: https://rubygems.org/
   specs:
     ast (2.4.3)
+    byebug (12.0.0)
     coderay (1.1.3)
     concurrent-ruby (1.3.5)
+    connection_pool (2.5.4)
     diff-lcs (1.6.2)
     json (2.13.2)
     langfuse (0.1.1)
@@ -18,6 +21,8 @@ GEM
     language_server-protocol (3.17.0.5)
     lint_roller (1.1.0)
     method_source (1.1.0)
+    openai (0.24.0)
+      connection_pool
     parallel (1.27.0)
     parser (3.3.9.0)
       ast (~> 2.4.1)
@@ -26,6 +31,9 @@ GEM
     pry (0.15.2)
       coderay (~> 1.1)
       method_source (~> 1.0)
+    pry-byebug (3.11.0)
+      byebug (~> 12.0)
+      pry (>= 0.13, < 0.16)
     racc (1.8.1)
     rainbow (3.1.1)
     rake (13.3.0)
@@ -71,7 +79,9 @@ DEPENDENCIES
   bundler (~> 2.0)
   langfuse (~> 0.1)
   langgraph_rb!
+  openai (~> 0.24.0)
   pry (~> 0.14)
+  pry-byebug
   rake (~> 13.0)
   rspec (~> 3.0)
   rubocop (~> 1.0)
data/examples/basic_example.rb CHANGED
@@ -123,10 +123,11 @@ def basic_example
     }
 
     # All responses go back to waiting for input (except farewell)
-    edge :handle_greeting, :receive_input
-    edge :handle_help, :receive_input
-    edge :handle_weather, :receive_input
-    edge :general_response, :receive_input
+    set_finish_point :handle_greeting
+    set_finish_point :handle_farewell
+    set_finish_point :handle_help
+    set_finish_point :handle_weather
+    set_finish_point :general_response
   end
 
   # Compile the graph
@@ -205,7 +206,9 @@ def streaming_example
 end
 
 # Run examples
-if __FILE__ == $0
-  basic_example
-  streaming_example
-end
+# if __FILE__ == $0
+#   basic_example
+#   streaming_example
+# end
+
+basic_example
data/examples/chat_openai_tools_example.rb ADDED
@@ -0,0 +1,115 @@
+#!/usr/bin/env ruby
+require 'pry'
+require 'pry-byebug'
+require_relative '../lib/langgraph_rb'
+
+class MovieInfoTool < LangGraphRB::ToolBase
+  define_function :search_movie, description: "MovieInfoTool: Search for a movie by title" do
+    property :query, type: "string", description: "The movie title to search for", required: true
+  end
+
+  define_function :get_movie_details, description: "MovieInfoTool: Get detailed information about a specific movie" do
+    property :movie_id, type: "integer", description: "The TMDb ID of the movie", required: true
+  end
+
+  def initialize(api_key: "demo")
+    @api_key = api_key
+  end
+
+  def search_movie(query:)
+    tool_response({ results: [ { id: 603, title: query, year: 1999 } ] })
+  end
+
+  def get_movie_details(movie_id:)
+    tool_response({ id: movie_id, title: "The Matrix", overview: "A computer hacker learns the truth of reality." })
+  end
+end
+
+def run_chat_openai_tools
+  tools = [MovieInfoTool.new(api_key: ENV['TMDB_API_KEY'] || 'demo')]
+
+  chat = LangGraphRB::ChatOpenAI.new(model: ENV.fetch('OPENAI_MODEL', 'gpt-4o-mini'), temperature: 0)
+  chat = chat.bind_tools(tools)
+
+  graph = LangGraphRB::Graph.new do
+    node :receive_input do |state|
+      user_msg = { role: 'user', content: state[:input].to_s }
+      existing = state[:messages] || []
+      { messages: existing + [user_msg] }
+    end
+
+    llm_node :chat, llm_client: chat, system_prompt: "You are a movie assistant. Use tools when helpful." do |state, context|
+      messages = state[:messages] || []
+      messages = [{ role: 'system', content: context[:system_prompt] }] + messages if context[:system_prompt]
+
+      response = context[:llm_client].call(messages)
+
+      if response.is_a?(Hash) && response[:tool_calls]
+        assistant_msg = { role: 'assistant', content: nil, tool_calls: response[:tool_calls] }
+        { messages: (state[:messages] || []) + [assistant_msg], tool_call: response[:tool_calls].first }
+      else
+        assistant_msg = { role: 'assistant', content: response.to_s }
+        { messages: (state[:messages] || []) + [assistant_msg], last_response: response.to_s }
+      end
+    end
+
+    # node :tool do |state|
+    #   tool_call = state[:tool_call]
+    #   tool_name = tool_call[:name]
+    #   tool_args = tool_call[:arguments]
+    #   tool_call_id = tool_call[:id]
+
+    #   puts "TOOL CALL #########################"
+    #   puts "tool_name: #{tool_name}"
+    #   puts "tool_args: #{tool_args}"
+    #   puts "tool_call_id: #{tool_call_id}"
+    #   puts "########################"
+    #   puts "########################"
+
+    #   tool_method_name = tool_name.to_s.split('__').last
+
+    #   # Dispatch via ToolBase API to keep consistent interface
+    #   tool_result = tools.call({ name: tool_method_name, arguments: tool_args })
+
+    #   { messages: (state[:messages] || []) + [{ role: 'tool', content: tool_result.to_json, tool_call_id: tool_call_id, name: tool_name.to_s }],
+    #     tool_call: nil }
+    # end
+
+    tool_node :tool, tools: tools
+
+    node :final_answer do |state|
+      { **state }
+    end
+
+    set_entry_point :receive_input
+    edge :receive_input, :chat
+
+    conditional_edge :chat, ->(state) {
+      state[:tool_call] ? "use_tool" : "final_answer"
+    }, {
+      "use_tool" => :tool,
+      "final_answer" => :final_answer
+    }
+
+    edge :tool, :chat
+    set_finish_point :final_answer
+  end
+
+  graph.compile!
+
+  start = { messages: [], input: "Find details about 'The Matrix'" }
+  result = graph.invoke(start)
+  puts "Messages:"
+  (result[:messages] || []).each do |m|
+    if m[:role] == 'assistant' && m[:tool_calls]
+      names = m[:tool_calls].map { |tc| tc[:name] }.join(', ')
+      puts "- assistant tool_calls: #{names}"
+    else
+      puts "- #{m[:role]}: #{m[:content]}"
+    end
+  end
+end
+
+run_chat_openai_tools
+
+
data/examples/conditional_example.rb ADDED
@@ -0,0 +1,38 @@
+#!/usr/bin/env ruby
+
+require_relative '../lib/langgraph_rb'
+
+graph = LangGraphRB::Graph.new do
+  node :greeting do |state|
+    { message: "Hello, how can I help you today?" }
+  end
+
+  node :analyze_intent do |state|
+    { intent: state[:message].downcase.include?("weather") ? "weather" : "general" }
+  end
+
+  conditional_edge :analyze_intent, ->(state) { state[:intent] }, {
+    "weather" => :weather_response,
+    "general" => :general_response
+  }
+
+  node :weather_response do |state|
+    { message: "The weather is sunny today!" }
+  end
+
+  node :general_response do |state|
+    { message: "That's interesting! Tell me more." }
+  end
+
+  set_entry_point :greeting
+  edge :greeting, :analyze_intent
+  set_finish_point :weather_response
+  set_finish_point :general_response
+end
+
+
+graph.compile!
+puts graph.to_mermaid
+result = graph.invoke({ message: "How's the weather?" })
+puts result[:message] # => "The weather is sunny today!"
+
data/examples/tool_and_llm_example.rb ADDED
@@ -0,0 +1,145 @@
+#!/usr/bin/env ruby
+
+require_relative '../lib/langgraph_rb'
+
+# Mock LLM client that can incorporate tool outputs when present
+class MockLLMClient
+  def call(messages)
+    last_user = messages&.reverse&.find { |m| m[:role] == 'user' }&.dig(:content)
+    last_tool = messages&.reverse&.find { |m| m[:role] == 'tool' }&.dig(:content)
+
+    if last_tool
+      "(mock) Based on tool result: #{last_tool} | Answering user: #{last_user}"
+    else
+      "(mock) You said: #{last_user}"
+    end
+  end
+end
+
+# Simple search tool that returns a faux result string
+class SearchTool
+  def self.call(args)
+    query = args.is_a?(Hash) ? args[:query] || args['query'] : args
+    query ||= args.to_s
+    "Results for '#{query}': [Result A, Result B, Result C]"
+  end
+end
+
+def tool_and_llm_example
+  puts "=== Tool + LLM Example ==="
+
+  mock_llm = MockLLMClient.new
+
+  graph = LangGraphRB::Graph.new(state_class: LangGraphRB::State) do
+    # 1) Capture user input into the message history
+    node :receive_input do |state|
+      user_msg = { role: 'user', content: state[:input].to_s }
+      existing = state[:messages] || []
+      { messages: existing + [user_msg], last_user_message: state[:input].to_s }
+    end
+
+    # 2) Decide whether to call a tool based on the user's request
+    #    If the user says: "search <query>", produce a tool_call for SearchTool
+    llm_node :router, llm_client: mock_llm, system_prompt: "You are a helpful assistant that can decide to call tools when asked." do |state, context|
+      last_user = state[:last_user_message].to_s
+
+      if (match = last_user.match(/^\s*search\s+(.+)$/i))
+        query = match[1].strip
+        tool_call = {
+          id: "call_#{Time.now.to_i}",
+          name: 'search',
+          args: { query: query }
+        }
+
+        assistant_msg = {
+          role: 'assistant',
+          content: "Let me search for: #{query}",
+          tool_calls: [tool_call]
+        }
+
+        {
+          messages: (state[:messages] || []) + [assistant_msg],
+          tool_call: tool_call # also put it in state for convenience
+        }
+      else
+        # No tool needed; provide a direct assistant response using the LLM
+        messages = state[:messages] || []
+        messages = [{ role: 'system', content: context[:system_prompt] }] + messages if context[:system_prompt]
+        response = context[:llm_client].call(messages)
+
+        {
+          messages: (state[:messages] || []) + [{ role: 'assistant', content: response }],
+          last_response: response
+        }
+      end
+    end
+
+    # 3) Execute the tool if requested and append a tool message
+    #    Use a custom block to merge the tool message with existing history
+    tool_node :use_tool, tool: SearchTool do |state|
+      # Determine the tool call (from state or messages)
+      tool_call = state[:tool_call]
+      unless tool_call
+        # Fallback: look for a message containing tool_calls
+        (state[:messages] || []).reverse.each do |msg|
+          if msg[:tool_calls] && msg[:tool_calls].first
+            tool_call = msg[:tool_calls].first
+            break
+          end
+        end
+      end
+
+      return { error: 'No tool call found' } unless tool_call
+
+      result = SearchTool.call(tool_call[:args])
+
+      tool_msg = {
+        role: 'tool',
+        content: result.to_s,
+        tool_call_id: tool_call[:id]
+      }
+
+      {
+        messages: (state[:messages] || []) + [tool_msg],
+        tool_result: result
+      }
+    end
+
+    # 4) Produce the final answer with the LLM, using any tool results
+    llm_node :final_answer, llm_client: mock_llm, system_prompt: "Use tool results if available to answer the user."
+
+    # Flow
+    set_entry_point :receive_input
+    edge :receive_input, :router
+
+    # If there is a tool_call, go to :use_tool, otherwise go directly to :final_answer
+    conditional_edge :router, ->(state) {
+      state[:tool_call] ? "use_tool" : "final_answer"
+    }, {
+      "use_tool" => :use_tool,
+      "final_answer" => :final_answer
+    }
+
+    edge :use_tool, :router
+    set_finish_point :final_answer
+  end
+
+  graph.compile!
+
+  puts graph.to_mermaid
+
+  puts "\n— Example 1: No tool needed —"
+  result1 = graph.invoke({ messages: [], input: "Tell me a joke." })
+  puts "Assistant: #{result1[:last_response]}"
+
+  puts "\n— Example 2: Tool is used —"
+  result2 = graph.invoke({ messages: [], input: "search Ruby LangGraphRB" })
+  final_message = (result2[:messages] || []).reverse.find { |m| m[:role] == 'assistant' }&.dig(:content)
+  puts "Assistant: #{final_message}"
+  tool_message = (result2[:messages] || []).reverse.find { |m| m[:role] == 'tool' }&.dig(:content)
+  puts "(Tool) #{tool_message}"
+end
+
+tool_and_llm_example
+
+
data/langgraph_rb.gemspec CHANGED
@@ -33,6 +33,7 @@ Gem::Specification.new do |spec|
 
   # Runtime dependencies
   spec.add_dependency "json", "~> 2.0"
+  spec.add_dependency "openai", "~> 0.24.0"
 
   # Development dependencies
  spec.add_development_dependency "bundler", "~> 2.0"
data/lib/langgraph_rb/chat_openai.rb ADDED
@@ -0,0 +1,197 @@
+require 'openai'
+require_relative 'llm_base'
+
+module LangGraphRB
+  # ChatOpenAI wrapper compatible with LLMBase, supporting tool binding
+  class ChatOpenAI < LLMBase
+    def initialize(model:, temperature: 0.0, api_key: ENV['OPENAI_API_KEY'])
+      super(model: model, temperature: temperature)
+      @client = OpenAI::Client.new(api_key: api_key)
+    end
+
+    # Returns a new instance with tools bound (non-destructive)
+    def bind_tools(tools)
+      dup_instance = self.class.new(model: @model, temperature: @temperature)
+      dup_instance.instance_variable_set(:@client, @client)
+      dup_instance.instance_variable_set(:@bound_tools, Array(tools))
+      dup_instance
+    end
+
+    # messages: array of { role: 'system'|'user'|'assistant'|'tool', content: string, tool_calls?: [...] }
+    # tools: optional array of tool definitions (objects responding to .to_openai_tool_schema)
+    # Returns assistant text string or a tool-call envelope hash when tool calls are produced
+    def call(messages, tools: nil)
+      raise ArgumentError, 'messages must be an Array' unless messages.is_a?(Array)
+
+      tool_definitions = (tools || @bound_tools)
+      tool_schemas = Array(tool_definitions).flat_map do |tool|
+        if tool.respond_to?(:to_openai_tool_schema)
+          Array(tool.to_openai_tool_schema)
+        else
+          [tool]
+        end
+      end
+
+      request_payload = {
+        model: @model,
+        temperature: @temperature,
+        messages: normalize_messages(messages)
+      }
+
+      if tool_schemas && !tool_schemas.empty?
+        request_payload[:tools] = tool_schemas
+        request_payload[:tool_choice] = 'auto'
+      end
+
+      notify_llm_request({
+        name: 'OpenAI::ChatCompletion',
+        model: @model,
+        model_parameters: { temperature: @temperature },
+        input: request_payload[:messages]
+      })
+
+      # openai 0.24.0 uses client.chat.completions.create(params)
+      response = @client.chat.completions.create(request_payload)
+
+      message = extract_message_from_response(response)
+      tool_calls = message[:tool_calls]
+      text_content = message[:content]
+
+      usage = extract_usage_from_response(response)
+      notify_llm_response({
+        output: tool_calls ? { tool_calls: tool_calls } : text_content,
+        prompt_tokens: usage[:prompt_tokens],
+        completion_tokens: usage[:completion_tokens],
+        total_tokens: usage[:total_tokens]
+      })
+
+      if tool_calls && !tool_calls.empty?
+        normalized_calls = tool_calls.map do |tc|
+          {
+            id: tc[:id],
+            name: tc[:function][:name],
+            arguments: parse_tool_arguments(tc[:function][:arguments])
+          }
+        end
+        { tool_calls: normalized_calls }
+      else
+        text_content
+      end
+    end
+
+    private
+
+    def normalize_messages(messages)
+      messages.map do |m|
+        role = (m[:role] || m['role'])
+        content = m[:content] || m['content']
+
+        normalized = { role: role }
+
+        if content.is_a?(Array)
+          normalized[:content] = content
+        elsif content.nil?
+          normalized[:content] = nil
+        else
+          normalized[:content] = content.to_s
+        end
+
+        # Preserve assistant tool_calls; convert internal format back to OpenAI shape
+        tool_calls = m[:tool_calls] || m['tool_calls']
+        if tool_calls && role.to_s == 'assistant'
+          normalized[:tool_calls] = Array(tool_calls).map do |tc|
+            # Already OpenAI shape
+            if tc[:function] || tc['function']
+              fn = tc[:function] || tc['function']
+              raw_args = fn[:arguments] || fn['arguments']
+              args_str = raw_args.is_a?(String) ? raw_args : JSON.dump(raw_args || {})
+              {
+                id: (tc[:id] || tc['id']),
+                type: 'function',
+                function: {
+                  name: (fn[:name] || fn['name']).to_s,
+                  arguments: args_str
+                }
+              }
+            else
+              # Internal normalized shape { id:, name:, arguments: Hash|String }
+              raw_args = tc[:arguments] || tc['arguments']
+              args_str = raw_args.is_a?(String) ? raw_args : JSON.dump(raw_args || {})
+              {
+                id: (tc[:id] || tc['id']),
+                type: 'function',
+                function: {
+                  name: (tc[:name] || tc['name']).to_s,
+                  arguments: args_str
+                }
+              }
+            end
+          end
+        end
+
+        # Preserve tool message linkage
+        if role.to_s == 'tool'
+          tool_call_id = m[:tool_call_id] || m['tool_call_id']
+          name = m[:name] || m['name']
+          normalized[:tool_call_id] = tool_call_id if tool_call_id
+          normalized[:name] = name if name
+        end
+
+        normalized
+      end
+    end
+
+    def parse_tool_arguments(raw)
+      return {} if raw.nil?
+      case raw
+      when String
+        JSON.parse(raw) rescue {}
+      when Hash
+        raw
+      else
+        {}
+      end
+    end
+
+    def extract_message_from_response(response)
+      # Handles both Hash responses and typed OpenAI::Models::* objects
+      if response.respond_to?(:choices)
+        first_choice = response.choices.first
+        if first_choice.respond_to?(:[])
+          first_choice[:message] || first_choice['message'] || {}
+        else
+          # In some versions, choices elements are structs with #message
+          first_choice.message
+        end
+      else
+        (response['choices'] || []).dig(0, 'message') || {}
+      end
+    end
+
+    def extract_usage_from_response(response)
+      usage = if response.respond_to?(:usage)
+        response.usage
+      else
+        response['usage']
+      end
+
+      return { prompt_tokens: nil, completion_tokens: nil, total_tokens: nil } unless usage
+
+      if usage.respond_to?(:[]) || usage.is_a?(Hash)
+        {
+          prompt_tokens: usage[:prompt_tokens] || usage['prompt_tokens'],
+          completion_tokens: usage[:completion_tokens] || usage['completion_tokens'],
+          total_tokens: usage[:total_tokens] || usage['total_tokens']
+        }
+      else
+        {
+          prompt_tokens: usage.prompt_tokens,
+          completion_tokens: usage.completion_tokens,
+          total_tokens: usage.total_tokens
+        }
+      end
+    end
+  end
+end
+
+
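
For orientation, a minimal sketch of driving this new client directly (assuming a valid OPENAI_API_KEY and the gpt-4o-mini model name; the message shape follows the normalize_messages contract above):

  require 'langgraph_rb'

  chat = LangGraphRB::ChatOpenAI.new(model: 'gpt-4o-mini', temperature: 0)

  # Plain exchange: #call returns the assistant's text.
  puts chat.call([{ role: 'user', content: 'Say hello in one word.' }])

  # With tools bound, #call may instead return a tool-call envelope,
  #   { tool_calls: [{ id:, name:, arguments: Hash }] }
  # which callers (e.g. an LLMNode) are expected to route to a ToolNode.
  chat_with_tools = chat.bind_tools([MovieInfoTool.new])  # MovieInfoTool from the example above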
data/lib/langgraph_rb/graph.rb CHANGED
@@ -42,11 +42,11 @@ module LangGraphRB
       @nodes[name] = LLMNode.new(name, llm_client: llm_client, system_prompt: system_prompt, &block)
     end
 
-    def tool_node(name, tool:, &block)
+    def tool_node(name, tools:, &block)
       name = name.to_sym
       raise GraphError, "Node '#{name}' already exists" if @nodes.key?(name)
 
-      @nodes[name] = ToolNode.new(name, tool: tool, &block)
+      @nodes[name] = ToolNode.new(name, tools: tools, &block)
     end
 
     def edge(from, to)
@@ -149,7 +149,15 @@ module LangGraphRB
         when Edge
           lines << " #{edge.from} --> #{edge.to}"
         when ConditionalEdge
-          lines << " #{edge.from} --> |condition| #{edge.from}_decision{condition}"
+          decision_name = "#{edge.from}_decision"
+          # Connect source to decision node with a label
+          lines << " #{edge.from} -- condition --> #{decision_name}{\"condition\"}"
+          # Add labeled branches from decision to each mapped destination
+          if edge.path_map && !edge.path_map.empty?
+            edge.path_map.each do |label, destination|
+              lines << " #{decision_name} -- #{label} --> #{destination}"
+            end
+          end
         when FanOutEdge
           edge.destinations.each do |dest|
            lines << " #{edge.from} --> #{dest}"
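
With the path map now rendered, a conditional edge such as the one in the tool examples above produces Mermaid along these lines (sketched by hand from the branch above; the flowchart preamble is emitted elsewhere in #to_mermaid and assumed here):

  chat -- condition --> chat_decision{"condition"}
  chat_decision -- use_tool --> tool
  chat_decision -- final_answer --> final_answer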
data/lib/langgraph_rb/llm_base.rb ADDED
@@ -0,0 +1,60 @@
+require 'json'
+
+module LangGraphRB
+  # Abstract base for chat LLM clients.
+  # Implementations must provide #call(messages, tools: nil) and may support #bind_tools.
+  class LLMBase
+    attr_reader :model, :temperature
+
+    def initialize(model:, temperature: 0.0)
+      @model = model
+      @temperature = temperature
+      @bound_tools = []
+      @observers = []
+      @node_name = nil
+    end
+
+    # Called by runtime to allow LLM client to emit tracing/telemetry events
+    def set_observers(observers, node_name)
+      @observers = Array(observers)
+      @node_name = node_name
+    end
+
+    def bind_tools(tools)
+      @bound_tools = Array(tools)
+      self
+    end
+
+    def bound_tools
+      @bound_tools
+    end
+
+    def call(_messages, tools: nil)
+      raise NotImplementedError, "Subclasses must implement #call(messages, tools: nil)"
+    end
+
+    protected
+
+    def notify_llm_request(payload)
+      @observers.each do |observer|
+        begin
+          observer.on_llm_request(payload, @node_name)
+        rescue => _e
+          # Ignore observer errors
+        end
+      end
+    end
+
+    def notify_llm_response(payload)
+      @observers.each do |observer|
+        begin
+          observer.on_llm_response(payload, @node_name)
+        rescue => _e
+          # Ignore observer errors
+        end
+      end
+    end
+  end
+end
+
+
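
A conforming client only has to implement #call; binding and observer plumbing are inherited. A minimal sketch (the EchoLLM class is hypothetical, useful for offline tests):

  # Hypothetical stub client for tests; not part of the gem.
  class EchoLLM < LangGraphRB::LLMBase
    def call(messages, tools: nil)
      notify_llm_request({ name: 'EchoLLM', model: @model, input: messages })
      last_user = messages.reverse.find { |m| m[:role] == 'user' }
      response = "(echo) #{last_user && last_user[:content]}"
      notify_llm_response({ output: response })
      response
    end
  end

  # Drop-in anywhere an llm_client is expected:
  #   llm_node :chat, llm_client: EchoLLM.new(model: 'echo')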
data/lib/langgraph_rb/node.rb CHANGED
@@ -81,19 +81,32 @@ module LangGraphRB
 
       response = (context[:llm_client] || @llm_client).call(messages)
 
-      {
-        messages: [{ role: 'assistant', content: response }],
-        last_response: response
-      }
+      if response.is_a?(Hash) && response[:tool_calls]
+        assistant_msg = {
+          role: 'assistant',
+          content: nil,
+          tool_calls: response[:tool_calls]
+        }
+        {
+          messages: (state[:messages] || []) + [assistant_msg],
+          tool_call: response[:tool_calls].first
+        }
+      else
+        assistant_msg = { role: 'assistant', content: response.to_s }
+        {
+          messages: (state[:messages] || []) + [assistant_msg],
+          last_response: response.to_s
+        }
+      end
     end
   end
 
   # Specialized node for tool calls
   class ToolNode < Node
-    attr_reader :tool
+    attr_reader :tools
 
-    def initialize(name, tool:, &block)
-      @tool = tool
+    def initialize(name, tools:, &block)
+      @tools = tools
       super(name, &(block || method(:default_tool_call)))
     end
 
@@ -105,15 +118,22 @@ module LangGraphRB
 
       return { error: "No tool call found" } unless tool_call
 
-      result = @tool.call(tool_call[:args])
+      # Normalize expected structure for tool dispatch
+      normalized = normalize_tool_call(tool_call)
+      tool = @tools.find { |t| t.class.name == normalized[:class_name] }
+      result = tool.call(normalized)
 
+      tool_message = {
+        role: 'tool',
+        content: result.to_s,
+        tool_call_id: normalized[:id],
+        name: normalized[:name]
+      }
+
       {
-        messages: [{
-          role: 'tool',
-          content: result.to_s,
-          tool_call_id: tool_call[:id]
-        }],
-        tool_result: result
+        **state,
+        messages: (state[:messages] || []) + [tool_message],
+        tool_call: nil
       }
     end
 
@@ -128,5 +148,34 @@ module LangGraphRB
 
       nil
     end
+
+    def normalize_tool_call(call)
+      # Supports shapes from OpenAI and our internal format
+      if call.is_a?(Hash)
+        if call[:name] && call[:arguments]
+          class_name = call[:name].to_s.split('__').first
+          name = call[:name].to_s.split('__').last
+          return {
+            id: call[:id],
+            name: name.to_sym,
+            class_name: class_name,
+            arguments: call[:arguments]
+          }
+        elsif call[:function]
+          return {
+            id: call[:id],
+            name: (call.dig(:function, :name) || call.dig('function', 'name')).to_sym,
+            arguments: call.dig(:function, :arguments) || call.dig('function', 'arguments')
+          }
+        elsif call[:args]
+          return {
+            id: call[:id],
+            name: (call[:name] || call['name']).to_sym,
+            arguments: call[:args]
+          }
+        end
+      end
+      call
+    end
  end
 end
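
The dispatch above leans on the ClassName__function naming convention from ToolDefinition. An illustrative walk-through (values are made up) of how an OpenAI-style call is resolved:

  # A call as normalized by ChatOpenAI:
  call = { id: 'call_1', name: 'MovieInfoTool__search_movie', arguments: { query: 'The Matrix' } }

  # normalize_tool_call splits the name on '__':
  #   { id: 'call_1', name: :search_movie, class_name: 'MovieInfoTool',
  #     arguments: { query: 'The Matrix' } }
  # ToolNode then picks the bound tool whose class name matches and calls it,
  # which ToolBase#call routes to MovieInfoTool#search_movie(query: 'The Matrix').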
data/lib/langgraph_rb/tool_definition.rb ADDED
@@ -0,0 +1,97 @@
+require 'json'
+
+module LangGraphRB
+  # Mixin to declare tool functions compatible with OpenAI tool/function calling
+  module ToolDefinition
+    def self.extended(base)
+      base.instance_variable_set(:@__tool_functions, {})
+    end
+
+    def define_function(name, description: "", &block)
+      class_name = self.name
+      fn_name = "#{class_name}__#{name}".to_sym
+      @__tool_functions ||= {}
+      @__tool_functions[fn_name] = {
+        name: fn_name,
+        description: description,
+        parameters: { type: 'object', properties: {}, required: [] }
+      }
+
+      # Evaluate the DSL inside a builder to collect properties
+      if block
+        builder = FunctionSchemaBuilder.new(@__tool_functions[fn_name][:parameters])
+        builder.instance_eval(&block)
+      end
+    end
+
+    def tool_functions
+      @__tool_functions || {}
+    end
+
+    def to_openai_tool_schema
+      # One class may expose multiple functions; return an array of tool entries
+      tool_functions.values.map do |fn|
+        {
+          type: 'function',
+          function: {
+            name: fn[:name].to_s,
+            description: fn[:description],
+            parameters: fn[:parameters]
+          }
+        }
+      end
+    end
+
+    class FunctionSchemaBuilder
+      def initialize(parameters)
+        @parameters = parameters
+      end
+
+      def property(name, type:, description: "", required: false)
+        @parameters[:properties][name.to_sym] = { type: type, description: description }
+        if required
+          @parameters[:required] ||= []
+          @parameters[:required] << name.to_sym
+        end
+      end
+    end
+  end
+
+  # Base class for tools using the ToolDefinition mixin
+  class ToolBase
+    extend ToolDefinition
+
+    def call(call_args)
+      # call_args: { name:, arguments: {} } or OpenAI-like hash
+      name = call_args[:name] || call_args['name']
+      args = call_args[:arguments] || call_args['arguments'] || {}
+      raise ArgumentError, 'Tool call missing name' if name.nil?
+
+      method_name = name.to_sym
+      unless respond_to?(method_name)
+        raise ArgumentError, "Undefined tool function: #{name}"
+      end
+
+      result = public_send(method_name, **symbolize_keys(args))
+      tool_response(result)
+    end
+
+    # Standardize tool responses; can be overridden by subclasses
+    def tool_response(payload)
+      payload
+    end
+
+    def to_openai_tool_schema
+      self.class.to_openai_tool_schema
+    end
+
+    private
+
+    def symbolize_keys(hash)
+      return hash unless hash.is_a?(Hash)
+      hash.transform_keys { |k| k.respond_to?(:to_sym) ? k.to_sym : k }
+    end
+  end
+end
+
+
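
Put together, the MovieInfoTool from the example file serializes to an OpenAI tools payload roughly like this (sketched from the builder logic above):

  MovieInfoTool.to_openai_tool_schema
  # => [{ type: 'function',
  #       function: {
  #         name: 'MovieInfoTool__search_movie',
  #         description: 'MovieInfoTool: Search for a movie by title',
  #         parameters: { type: 'object',
  #                       properties: { query: { type: 'string', description: 'The movie title to search for' } },
  #                       required: [:query] } } },
  #     ...] # plus a matching entry for MovieInfoTool__get_movie_details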
data/lib/langgraph_rb/version.rb CHANGED
@@ -1,3 +1,3 @@
 module LangGraphRB
-  VERSION = "0.1.4"
+  VERSION = "0.1.5"
 end
data/lib/langgraph_rb.rb CHANGED
@@ -9,6 +9,9 @@ require_relative 'langgraph_rb/stores/memory'
 require_relative 'langgraph_rb/observers/base'
 require_relative 'langgraph_rb/observers/logger'
 require_relative 'langgraph_rb/observers/structured'
+require_relative 'langgraph_rb/llm_base'
+require_relative 'langgraph_rb/chat_openai'
+require_relative 'langgraph_rb/tool_definition'
 
 module LangGraphRB
   class Error < StandardError; end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: langgraph_rb
 version: !ruby/object:Gem::Version
-  version: 0.1.4
+  version: 0.1.5
 platform: ruby
 authors:
 - Julian Toro
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2025-09-16 00:00:00.000000000 Z
+date: 2025-09-20 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: json
@@ -24,6 +24,20 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '2.0'
+- !ruby/object:Gem::Dependency
+  name: openai
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.24.0
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.24.0
 - !ruby/object:Gem::Dependency
   name: bundler
   requirement: !ruby/object:Gem::Requirement
@@ -111,17 +125,22 @@ files:
 - SUMMARY.md
 - examples/advanced_example.rb
 - examples/basic_example.rb
+- examples/chat_openai_tools_example.rb
+- examples/conditional_example.rb
 - examples/initial_state_example.rb
 - examples/langfuse_example.rb
 - examples/llmnode_example.rb
 - examples/observer_example.rb
 - examples/reducers_example.rb
 - examples/simple_test.rb
+- examples/tool_and_llm_example.rb
 - langgraph_rb.gemspec
 - lib/langgraph_rb.rb
+- lib/langgraph_rb/chat_openai.rb
 - lib/langgraph_rb/command.rb
 - lib/langgraph_rb/edge.rb
 - lib/langgraph_rb/graph.rb
+- lib/langgraph_rb/llm_base.rb
 - lib/langgraph_rb/node.rb
 - lib/langgraph_rb/observers/base.rb
 - lib/langgraph_rb/observers/logger.rb
@@ -129,6 +148,7 @@ files:
 - lib/langgraph_rb/runner.rb
 - lib/langgraph_rb/state.rb
 - lib/langgraph_rb/stores/memory.rb
+- lib/langgraph_rb/tool_definition.rb
 - lib/langgraph_rb/version.rb
 - test_runner.rb
 homepage: https://github.com/fulit103/langgraph_rb