langgraph_rb 0.1.4 → 0.1.6

This diff shows the changes between publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: f5649cc9cef30c96380dc2ccd9a5c576af8d3efd7e02552b03d81cb45360f408
-  data.tar.gz: 3c706c127ac2fbd5e9d8a0be723575228a21d9ff91672169dc66174cd8dba3c5
+  metadata.gz: 071bb22811337400b9569f90e07b50c0f7c6784b1025ce30b0502c5d321965b5
+  data.tar.gz: dad734ef3e60f76d8bf47de4c1f5f50a1b361dad8cfd7e36ed57e3cd4f998dca
 SHA512:
-  metadata.gz: 7dd32ea2d6c98ae356596cee6ca0d697802e7107649faf63c598606f9b708a68cd7c72e72d4ad5423d136a72d74c242108c28b670f6ab2aea2cf3a18925f35d4
-  data.tar.gz: d6a30010935f797cc6ebbe3767db38c8726029956192b57cd69304d6b0238d3dd0bdb1b32d7d9b348bbc8ee4469f8b1fe62b750386834c9cb7ce52980f1f25a4
+  metadata.gz: 20c5c1c2ab980770d8ef0ca69159c1566c889c3ecd80d9185f6767b7954d92f5a15847ac50b1024dbd976670dc90d76b1bec9a95fa5d55b6123d6cdb26633bd2
+  data.tar.gz: a6edae4423708c7f807a050a9d4238d36ed188d0a0697e04eb2f21cebb3987ddf9d7f627cd6657d0b128470a58ca2a6895d02614da30341c73dbd5af1584337e
data/Gemfile CHANGED
@@ -2,9 +2,12 @@ source 'https://rubygems.org'
 
 gemspec
 
+gem 'openai', '~> 0.24.0'
+
 group :development, :test do
   gem 'rspec', '~> 3.0'
   gem 'pry', '~> 0.14'
+  gem 'pry-byebug'
   gem 'rubocop', '~> 1.0'
   gem 'langfuse', '~> 0.1'
 end
data/Gemfile.lock CHANGED
@@ -1,15 +1,18 @@
 PATH
   remote: .
   specs:
-    langgraph_rb (0.1.2)
+    langgraph_rb (0.1.4)
       json (~> 2.0)
+      openai (~> 0.24.0)
 
 GEM
   remote: https://rubygems.org/
   specs:
     ast (2.4.3)
+    byebug (12.0.0)
     coderay (1.1.3)
     concurrent-ruby (1.3.5)
+    connection_pool (2.5.4)
     diff-lcs (1.6.2)
     json (2.13.2)
     langfuse (0.1.1)
@@ -18,6 +21,8 @@ GEM
     language_server-protocol (3.17.0.5)
     lint_roller (1.1.0)
     method_source (1.1.0)
+    openai (0.24.0)
+      connection_pool
     parallel (1.27.0)
     parser (3.3.9.0)
       ast (~> 2.4.1)
@@ -26,6 +31,9 @@ GEM
     pry (0.15.2)
       coderay (~> 1.1)
       method_source (~> 1.0)
+    pry-byebug (3.11.0)
+      byebug (~> 12.0)
+      pry (>= 0.13, < 0.16)
     racc (1.8.1)
     rainbow (3.1.1)
     rake (13.3.0)
@@ -71,7 +79,9 @@ DEPENDENCIES
   bundler (~> 2.0)
   langfuse (~> 0.1)
   langgraph_rb!
+  openai (~> 0.24.0)
   pry (~> 0.14)
+  pry-byebug
   rake (~> 13.0)
   rspec (~> 3.0)
   rubocop (~> 1.0)
data/examples/basic_example.rb CHANGED
@@ -123,10 +123,11 @@ def basic_example
     }
 
     # All responses go back to waiting for input (except farewell)
-    edge :handle_greeting, :receive_input
-    edge :handle_help, :receive_input
-    edge :handle_weather, :receive_input
-    edge :general_response, :receive_input
+    set_finish_point :handle_greeting
+    set_finish_point :handle_farewell
+    set_finish_point :handle_help
+    set_finish_point :handle_weather
+    set_finish_point :general_response
   end
 
   # Compile the graph
@@ -205,7 +206,9 @@ def streaming_example
 end
 
 # Run examples
-if __FILE__ == $0
-  basic_example
-  streaming_example
-end
+# if __FILE__ == $0
+#   basic_example
+#   streaming_example
+# end
+
+basic_example
data/examples/chat_openai_tools_example.rb ADDED
@@ -0,0 +1,130 @@
+#!/usr/bin/env ruby
+require 'pry'
+require 'pry-byebug'
+require 'langfuse'
+require_relative '../lib/langgraph_rb'
+
+url = 'https://us.cloud.langfuse.com'
+
+Langfuse.configure do |config|
+  config.public_key = ENV['LANGFUSE_PUBLIC_KEY'] # e.g., 'pk-lf-...'
+  config.secret_key = ENV['LANGFUSE_SECRET_KEY'] # e.g., 'sk-lf-...'
+  config.host = url
+  config.debug = true # Enable debug logging
+end
+
+class MovieInfoTool < LangGraphRB::ToolBase
+  define_function :search_movie, description: "MovieInfoTool: Search for a movie by title" do
+    property :query, type: "string", description: "The movie title to search for", required: true
+  end
+
+  define_function :get_movie_details, description: "MovieInfoTool: Get detailed information about a specific movie" do
+    property :movie_id, type: "integer", description: "The TMDb ID of the movie", required: true
+  end
+
+  def initialize(api_key: "demo")
+    @api_key = api_key
+  end
+
+  def search_movie(query:)
+    tool_response({ results: [ { id: 603, title: query, year: 1999 } ] })
+  end
+
+  def get_movie_details(movie_id:)
+    tool_response({ id: movie_id, title: "The Matrix", overview: "A computer hacker learns the truth of reality." })
+  end
+end
+
+def run_chat_openai_tools
+  tools = [MovieInfoTool.new(api_key: ENV['TMDB_API_KEY'] || 'demo')]
+
+  chat = LangGraphRB::ChatOpenAI.new(model: ENV.fetch('OPENAI_MODEL', 'gpt-4o-mini'), temperature: 0)
+  chat = chat.bind_tools(tools)
+
+  observers = [LangGraphRB::Observers::LangfuseObserver.new(name: 'chat-openai-tools-example')]
+
+  graph = LangGraphRB::Graph.new do
+    node :receive_input do |state|
+      user_msg = { role: 'user', content: state[:input].to_s }
+      existing = state[:messages] || []
+      { messages: existing + [user_msg] }
+    end
+
+    llm_node :chat, llm_client: chat, system_prompt: "You are a movie assistant. Use tools when helpful."
+
+    tool_node :tool, tools: tools
+
+    node :final_answer do |state|
+      { **state }
+    end
+
+    set_entry_point :receive_input
+    edge :receive_input, :chat
+
+    conditional_edge :chat, ->(state) {
+      state[:tool_call] ? "use_tool" : "final_answer"
+    }, {
+      "use_tool" => :tool,
+      "final_answer" => :final_answer
+    }
+
+    edge :tool, :chat
+    set_finish_point :final_answer
+  end
+
+  graph.compile!
+
+  graph.draw_mermaid
+
+  start = { messages: [], input: "Find details about 'The Matrix'" }
+  result = graph.invoke(start, observers: observers)
+  puts "Messages:"
+  (result[:messages] || []).each do |m|
+    if m[:role] == 'assistant' && m[:tool_calls]
+      names = m[:tool_calls].map { |tc| tc[:name] }.join(', ')
+      puts "- assistant tool_calls: #{names}"
+    else
+      puts "- #{m[:role]}: #{m[:content]}"
+    end
+  end
+end
+
+run_chat_openai_tools
+
+
+# llm_node :chat, llm_client: chat, system_prompt: "You are a movie assistant. Use tools when helpful." do |state, context|
+#   messages = state[:messages] || []
+#   messages = [{ role: 'system', content: context[:system_prompt] }] + messages if context[:system_prompt]
+
+#   response = context[:llm_client].call(messages)
+
+#   if response.is_a?(Hash) && response[:tool_calls]
+#     assistant_msg = { role: 'assistant', content: nil, tool_calls: response[:tool_calls] }
+#     { messages: (state[:messages] || []) + [assistant_msg], tool_call: response[:tool_calls].first }
+#   else
+#     assistant_msg = { role: 'assistant', content: response.to_s }
+#     { messages: (state[:messages] || []) + [assistant_msg], last_response: response.to_s }
+#   end
+# end
+
+# node :tool do |state|
+#   tool_call = state[:tool_call]
+#   tool_name = tool_call[:name]
+#   tool_args = tool_call[:arguments]
+#   tool_call_id = tool_call[:id]
+
+#   puts "TOOL CALL #########################"
+#   puts "tool_name: #{tool_name}"
+#   puts "tool_args: #{tool_args}"
+#   puts "tool_call_id: #{tool_call_id}"
+#   puts "########################"
+#   puts "########################"
+
+#   tool_method_name = tool_name.to_s.split('__').last
+
+#   # Dispatch via ToolBase API to keep consistent interface
+#   tool_result = tools.call({ name: tool_method_name, arguments: tool_args })
+
+#   { messages: (state[:messages] || []) + [{ role: 'tool', content: tool_result.to_json, tool_call_id: tool_call_id, name: tool_name.to_s }],
+#     tool_call: nil }
+# end
data/examples/conditional_example.rb ADDED
@@ -0,0 +1,38 @@
+#!/usr/bin/env ruby
+
+require_relative '../lib/langgraph_rb'
+
+graph = LangGraphRB::Graph.new do
+  node :greeting do |state|
+    { message: "Hello, how can I help you today?" }
+  end
+
+  node :analyze_intent do |state|
+    { intent: state[:message].downcase.include?("weather") ? "weather" : "general" }
+  end
+
+  conditional_edge :analyze_intent, ->(state) { state[:intent] }, {
+    "weather" => :weather_response,
+    "general" => :general_response
+  }
+
+  node :weather_response do |state|
+    { message: "The weather is sunny today!" }
+  end
+
+  node :general_response do |state|
+    { message: "That's interesting! Tell me more." }
+  end
+
+  set_entry_point :greeting
+  edge :greeting, :analyze_intent
+  set_finish_point :weather_response
+  set_finish_point :general_response
+end
+
+
+graph.compile!
+puts graph.to_mermaid
+result = graph.invoke({ message: "How's the weather?" })
+puts result[:message] # => "The weather is sunny today!"
+
data/examples/langfuse_example.rb CHANGED
@@ -12,27 +12,6 @@ Langfuse.configure do |config|
 end
 
 
-class LangfuseObserver < LangGraphRB::Observers::BaseObserver
-
-  def on_graph_start(event)
-    @trace ||= Langfuse.trace(
-      name: "graph-start2",
-      thread_id: event.thread_id,
-      metadata: event.to_h
-    )
-  end
-
-  def on_node_end(event)
-    span = Langfuse.span(
-      name: "node-#{event.node_name}",
-      trace_id: @trace.id,
-      input: event.to_h,
-    )
-    Langfuse.update_span(span)
-  end
-end
-
-
 def langfuse_example
   puts "########################################################"
   puts "########################################################"
@@ -82,7 +61,10 @@ def langfuse_example
 
 
   graph.compile!
-  result = graph.invoke({ message: "Hello World", value: 31}, observers: [LangfuseObserver.new])
+  result = graph.invoke(
+    { message: "Hello World", value: 31},
+    observers: [LangGraphRB::Observers::LangfuseObserver.new(name: 'langfuse-example')]
+  )
   puts "Result: #{result}"
   puts "########################################################"
   puts "########################################################"
data/langgraph_rb.gemspec CHANGED
@@ -33,6 +33,7 @@ Gem::Specification.new do |spec|
 
   # Runtime dependencies
   spec.add_dependency "json", "~> 2.0"
+  spec.add_dependency "openai", "~> 0.24.0"
 
   # Development dependencies
   spec.add_development_dependency "bundler", "~> 2.0"
data/lib/langgraph_rb/chat_openai.rb ADDED
@@ -0,0 +1,197 @@
+require 'openai'
+require_relative 'llm_base'
+
+module LangGraphRB
+  # ChatOpenAI wrapper compatible with LLMBase, supporting tool binding
+  class ChatOpenAI < LLMBase
+    def initialize(model:, temperature: 0.0, api_key: ENV['OPENAI_API_KEY'])
+      super(model: model, temperature: temperature)
+      @client = OpenAI::Client.new(api_key: api_key)
+    end
+
+    # Returns a new instance with tools bound (non-destructive)
+    def bind_tools(tools)
+      dup_instance = self.class.new(model: @model, temperature: @temperature)
+      dup_instance.instance_variable_set(:@client, @client)
+      dup_instance.instance_variable_set(:@bound_tools, Array(tools))
+      dup_instance
+    end
+
+    # messages: array of { role: 'system'|'user'|'assistant'|'tool', content: string, tool_calls?: [...] }
+    # tools: optional array of tool definitions (objects responding to .to_openai_tool_schema)
+    # Returns assistant text string or a tool-call envelope hash when tool calls are produced
+    def call(messages, tools: nil)
+      raise ArgumentError, 'messages must be an Array' unless messages.is_a?(Array)
+
+      tool_definitions = (tools || @bound_tools)
+      tool_schemas = Array(tool_definitions).flat_map do |tool|
+        if tool.respond_to?(:to_openai_tool_schema)
+          Array(tool.to_openai_tool_schema)
+        else
+          [tool]
+        end
+      end
+
+      request_payload = {
+        model: @model,
+        temperature: @temperature,
+        messages: normalize_messages(messages)
+      }
+
+      if tool_schemas && !tool_schemas.empty?
+        request_payload[:tools] = tool_schemas
+        request_payload[:tool_choice] = 'auto'
+      end
+
+      notify_llm_request({
+        name: 'OpenAI::ChatCompletion',
+        model: @model,
+        model_parameters: { temperature: @temperature },
+        input: request_payload[:messages]
+      })
+
+      # openai 0.24.0 uses client.chat.completions.create(params)
+      response = @client.chat.completions.create(request_payload)
+
+      message = extract_message_from_response(response)
+      tool_calls = message[:tool_calls]
+      text_content = message[:content]
+
+      usage = extract_usage_from_response(response)
+      notify_llm_response({
+        output: tool_calls ? { tool_calls: tool_calls } : text_content,
+        prompt_tokens: usage[:prompt_tokens],
+        completion_tokens: usage[:completion_tokens],
+        total_tokens: usage[:total_tokens]
+      })
+
+      if tool_calls && !tool_calls.empty?
+        normalized_calls = tool_calls.map do |tc|
+          {
+            id: tc[:id],
+            name: tc[:function][:name],
+            arguments: parse_tool_arguments(tc[:function][:arguments])
+          }
+        end
+        { tool_calls: normalized_calls }
+      else
+        text_content
+      end
+    end
+
+    private
+
+    def normalize_messages(messages)
+      messages.map do |m|
+        role = (m[:role] || m['role'])
+        content = m[:content] || m['content']
+
+        normalized = { role: role }
+
+        if content.is_a?(Array)
+          normalized[:content] = content
+        elsif content.nil?
+          normalized[:content] = nil
+        else
+          normalized[:content] = content.to_s
+        end
+
+        # Preserve assistant tool_calls; convert internal format back to OpenAI shape
+        tool_calls = m[:tool_calls] || m['tool_calls']
+        if tool_calls && role.to_s == 'assistant'
+          normalized[:tool_calls] = Array(tool_calls).map do |tc|
+            # Already OpenAI shape
+            if tc[:function] || tc['function']
+              fn = tc[:function] || tc['function']
+              raw_args = fn[:arguments] || fn['arguments']
+              args_str = raw_args.is_a?(String) ? raw_args : JSON.dump(raw_args || {})
+              {
+                id: (tc[:id] || tc['id']),
+                type: 'function',
+                function: {
+                  name: (fn[:name] || fn['name']).to_s,
+                  arguments: args_str
+                }
+              }
+            else
+              # Internal normalized shape { id:, name:, arguments: Hash|String }
+              raw_args = tc[:arguments] || tc['arguments']
+              args_str = raw_args.is_a?(String) ? raw_args : JSON.dump(raw_args || {})
+              {
+                id: (tc[:id] || tc['id']),
+                type: 'function',
+                function: {
+                  name: (tc[:name] || tc['name']).to_s,
+                  arguments: args_str
+                }
+              }
+            end
+          end
+        end
+
+        # Preserve tool message linkage
+        if role.to_s == 'tool'
+          tool_call_id = m[:tool_call_id] || m['tool_call_id']
+          name = m[:name] || m['name']
+          normalized[:tool_call_id] = tool_call_id if tool_call_id
+          normalized[:name] = name if name
+        end
+
+        normalized
+      end
+    end
+
+    def parse_tool_arguments(raw)
+      return {} if raw.nil?
+      case raw
+      when String
+        JSON.parse(raw) rescue {}
+      when Hash
+        raw
+      else
+        {}
+      end
+    end
+
+    def extract_message_from_response(response)
+      # Handles both Hash responses and typed OpenAI::Models::* objects
+      if response.respond_to?(:choices)
+        first_choice = response.choices.first
+        if first_choice.respond_to?(:[])
+          first_choice[:message] || first_choice['message'] || {}
+        else
+          # In some versions, choices elements are structs with #message
+          first_choice.message
+        end
+      else
+        (response['choices'] || []).dig(0, 'message') || {}
+      end
+    end
+
+    def extract_usage_from_response(response)
+      usage = if response.respond_to?(:usage)
+        response.usage
+      else
+        response['usage']
+      end
+
+      return { prompt_tokens: nil, completion_tokens: nil, total_tokens: nil } unless usage
+
+      if usage.respond_to?(:[]) || usage.is_a?(Hash)
+        {
+          prompt_tokens: usage[:prompt_tokens] || usage['prompt_tokens'],
+          completion_tokens: usage[:completion_tokens] || usage['completion_tokens'],
+          total_tokens: usage[:total_tokens] || usage['total_tokens']
+        }
+      else
+        {
+          prompt_tokens: usage.prompt_tokens,
+          completion_tokens: usage.completion_tokens,
+          total_tokens: usage.total_tokens
+        }
+      end
+    end
+  end
+end
+
+
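For orientation, a minimal usage sketch of the client added above (not part of the diff). It assumes a valid OPENAI_API_KEY and reuses the MovieInfoTool class from the example file earlier in this diff; per the source, #call returns plain assistant text, or a { tool_calls: [...] } envelope when the model requests a tool:

    require 'langgraph_rb'

    chat = LangGraphRB::ChatOpenAI.new(model: 'gpt-4o-mini', temperature: 0)

    # Plain chat: returns the assistant's text content
    puts chat.call([{ role: 'user', content: 'Say hello' }])

    # With tools bound, the same call may instead return a normalized envelope:
    # { tool_calls: [{ id: 'call_...', name: 'MovieInfoTool__search_movie', arguments: { 'query' => '...' } }] }
    bound = chat.bind_tools([MovieInfoTool.new])
    result = bound.call([{ role: 'user', content: "Find 'The Matrix'" }])

Note that bind_tools returns a fresh instance sharing the same client, so an unbound ChatOpenAI can be reused safely.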
data/lib/langgraph_rb/graph.rb CHANGED
@@ -42,11 +42,11 @@ module LangGraphRB
       @nodes[name] = LLMNode.new(name, llm_client: llm_client, system_prompt: system_prompt, &block)
     end
 
-    def tool_node(name, tool:, &block)
+    def tool_node(name, tools:, &block)
       name = name.to_sym
       raise GraphError, "Node '#{name}' already exists" if @nodes.key?(name)
 
-      @nodes[name] = ToolNode.new(name, tool: tool, &block)
+      @nodes[name] = ToolNode.new(name, tools: tools, &block)
     end
 
     def edge(from, to)
@@ -149,7 +149,15 @@ module LangGraphRB
        when Edge
          lines << " #{edge.from} --> #{edge.to}"
        when ConditionalEdge
-          lines << " #{edge.from} --> |condition| #{edge.from}_decision{condition}"
+          decision_name = "#{edge.from}_decision"
+          # Connect source to decision node with a label
+          lines << " #{edge.from} -- condition --> #{decision_name}{\"condition\"}"
+          # Add labeled branches from decision to each mapped destination
+          if edge.path_map && !edge.path_map.empty?
+            edge.path_map.each do |label, destination|
+              lines << " #{decision_name} -- #{label} --> #{destination}"
+            end
+          end
        when FanOutEdge
          edge.destinations.each do |dest|
            lines << " #{edge.from} --> #{dest}"
@@ -206,7 +214,10 @@ module LangGraphRB
          validate_node_exists!(edge.to)
        when ConditionalEdge
          validate_node_exists!(edge.from)
-          # Path map targets will be validated at runtime
+          # If a static path_map is provided, validate mapped destinations now
+          if edge.path_map && !edge.path_map.empty?
+            edge.path_map.values.each { |dest| validate_node_exists!(dest) }
+          end
        when FanOutEdge
          validate_node_exists!(edge.from)
          edge.destinations.each { |dest| validate_node_exists!(dest) }
@@ -218,6 +229,12 @@ module LangGraphRB
      case edge
      when Edge
        [edge.to]
+      when ConditionalEdge
+        if edge.path_map && !edge.path_map.empty?
+          edge.path_map.values
+        else
+          []
+        end
      when FanOutEdge
        edge.destinations
      else
@@ -256,11 +273,17 @@ module LangGraphRB
        case edge
        when Edge
          reachable += find_reachable_nodes(edge.to, visited.dup)
+        when ConditionalEdge
+          # If a static path_map is provided, consider all mapped destinations reachable
+          if edge.path_map && !edge.path_map.empty?
+            edge.path_map.values.each do |dest|
+              reachable += find_reachable_nodes(dest, visited.dup)
+            end
+          end
        when FanOutEdge
          edge.destinations.each do |dest|
            reachable += find_reachable_nodes(dest, visited.dup)
-        end
-        # ConditionalEdge paths are dynamic, so we can't pre-validate them
+          end
        end
      end
 
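To illustrate the new ConditionalEdge rendering: previously a conditional edge produced only a single `chat --> |condition| chat_decision{condition}` line with no branches. For the conditional edge in the chat example above (from :chat, with path map "use_tool"/"final_answer"), the changed branch now also emits one labeled branch per mapped destination, roughly:

    chat -- condition --> chat_decision{"condition"}
    chat_decision -- use_tool --> tool
    chat_decision -- final_answer --> final_answer

The same static path_map now also feeds validation and reachability analysis, so typos in mapped destinations fail at compile time instead of at runtime.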
data/lib/langgraph_rb/llm_base.rb ADDED
@@ -0,0 +1,60 @@
+require 'json'
+
+module LangGraphRB
+  # Abstract base for chat LLM clients.
+  # Implementations must provide #call(messages, tools: nil) and may support #bind_tools.
+  class LLMBase
+    attr_reader :model, :temperature
+
+    def initialize(model:, temperature: 0.0)
+      @model = model
+      @temperature = temperature
+      @bound_tools = []
+      @observers = []
+      @node_name = nil
+    end
+
+    # Called by runtime to allow LLM client to emit tracing/telemetry events
+    def set_observers(observers, node_name)
+      @observers = Array(observers)
+      @node_name = node_name
+    end
+
+    def bind_tools(tools)
+      @bound_tools = Array(tools)
+      self
+    end
+
+    def bound_tools
+      @bound_tools
+    end
+
+    def call(_messages, tools: nil)
+      raise NotImplementedError, "Subclasses must implement #call(messages, tools: nil)"
+    end
+
+    protected
+
+    def notify_llm_request(payload)
+      @observers.each do |observer|
+        begin
+          observer.on_llm_request(payload, @node_name)
+        rescue => _e
+          # Ignore observer errors
+        end
+      end
+    end
+
+    def notify_llm_response(payload)
+      @observers.each do |observer|
+        begin
+          observer.on_llm_response(payload, @node_name)
+        rescue => _e
+          # Ignore observer errors
+        end
+      end
+    end
+  end
+end
+
+
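As a sketch of the contract this base class defines (EchoLLM is hypothetical, not part of the gem): a subclass only needs to implement #call, and can use the protected notify_* helpers so that observers such as LangfuseObserver see its traffic:

    class EchoLLM < LangGraphRB::LLMBase
      # Minimal subclass: echo the last user message back as the assistant reply
      def call(messages, tools: nil)
        notify_llm_request({ model: @model, input: messages })
        reply = "echo: #{messages.last[:content]}"
        notify_llm_response({ output: reply })
        reply
      end
    end

    llm = EchoLLM.new(model: 'echo-1')
    llm.call([{ role: 'user', content: 'hi' }]) # => "echo: hi"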
data/lib/langgraph_rb/node.rb CHANGED
@@ -81,19 +81,32 @@ module LangGraphRB
 
       response = (context[:llm_client] || @llm_client).call(messages)
 
-      {
-        messages: [{ role: 'assistant', content: response }],
-        last_response: response
-      }
+      if response.is_a?(Hash) && response[:tool_calls]
+        assistant_msg = {
+          role: 'assistant',
+          content: nil,
+          tool_calls: response[:tool_calls]
+        }
+        {
+          messages: (state[:messages] || []) + [assistant_msg],
+          tool_call: response[:tool_calls].first
+        }
+      else
+        assistant_msg = { role: 'assistant', content: response.to_s }
+        {
+          messages: (state[:messages] || []) + [assistant_msg],
+          last_response: response.to_s
+        }
+      end
     end
   end
 
   # Specialized node for tool calls
   class ToolNode < Node
-    attr_reader :tool
+    attr_reader :tools
 
-    def initialize(name, tool:, &block)
-      @tool = tool
+    def initialize(name, tools:, &block)
+      @tools = tools
       super(name, &(block || method(:default_tool_call)))
     end
 
@@ -105,15 +118,22 @@ module LangGraphRB
 
       return { error: "No tool call found" } unless tool_call
 
-      result = @tool.call(tool_call[:args])
+      # Normalize expected structure for tool dispatch
+      normalized = normalize_tool_call(tool_call)
+      tool = @tools.find { |t| t.class.name == normalized[:class_name] }
+      result = tool.call(normalized)
 
+      tool_message = {
+        role: 'tool',
+        content: result.to_s,
+        tool_call_id: normalized[:id],
+        name: normalized[:name]
+      }
+
       {
-        messages: [{
-          role: 'tool',
-          content: result.to_s,
-          tool_call_id: tool_call[:id]
-        }],
-        tool_result: result
+        **state,
+        messages: (state[:messages] || []) + [tool_message],
+        tool_call: nil
      }
    end
 
@@ -128,5 +148,34 @@ module LangGraphRB
 
       nil
     end
+
+    def normalize_tool_call(call)
+      # Supports shapes from OpenAI and our internal format
+      if call.is_a?(Hash)
+        if call[:name] && call[:arguments]
+          class_name = call[:name].to_s.split('__').first
+          name = call[:name].to_s.split('__').last
+          return {
+            id: call[:id],
+            name: name.to_sym,
+            class_name: class_name,
+            arguments: call[:arguments]
+          }
+        elsif call[:function]
+          return {
+            id: call[:id],
+            name: (call.dig(:function, :name) || call.dig('function', 'name')).to_sym,
+            arguments: call.dig(:function, :arguments) || call.dig('function', 'arguments')
+          }
+        elsif call[:args]
+          return {
+            id: call[:id],
+            name: (call[:name] || call['name']).to_sym,
+            arguments: call[:args]
+          }
+        end
+      end
+      call
+    end
   end
 end
data/lib/langgraph_rb/observers/langfuse.rb ADDED
@@ -0,0 +1,205 @@
+require 'langfuse'
+
+module LangGraphRB
+  module Observers
+    # Langfuse observer that captures graph, node, and LLM events.
+    # - Creates a Langfuse trace for each graph run (thread_id)
+    # - Creates spans per node execution and links LLM generations to spans
+    # - Thread-safe and resilient to Langfuse client errors
+    class LangfuseObserver < BaseObserver
+      def initialize(name: 'langgraph-run')
+        @name = name
+        @trace = nil
+        @trace_mutex = Mutex.new
+
+        # Maintain a stack per node_name to safely handle parallel executions
+        # { Symbol(String) => [ { span: <Span>, generation: <Generation>|nil } ] }
+        @records_by_node = Hash.new { |h, k| h[k] = [] }
+        @records_mutex = Mutex.new
+      end
+
+      # Graph lifecycle
+      def on_graph_start(event)
+        ensure_trace!(event)
+      rescue => _e
+        # Swallow observer errors to avoid impacting execution
+      end
+
+      def on_graph_end(event)
+        return unless @trace
+        Langfuse.trace(id: @trace.id, output: safe_state(event.initial_state))
+      rescue => _e
+      end
+
+      # Node lifecycle
+      def on_node_start(event)
+        return if event.node_name == :__start__
+
+        trace = ensure_trace!(event)
+        return unless trace
+
+        span = Langfuse.span(
+          name: event.node_name.to_s,
+          trace_id: trace.id,
+          metadata: event.to_h
+        )
+
+        # Track record on a stack keyed by node_name
+        with_records_lock do
+          @records_by_node[event.node_name] << { span: span, generation: nil }
+        end
+
+        Langfuse.update_span(span)
+      rescue => _e
+      end
+
+      def on_node_end(event)
+        return if event.node_name == :__start__
+
+        record = with_records_lock do
+          @records_by_node[event.node_name].pop
+        end
+
+        span = record && record[:span]
+        return unless span
+
+        data = event.to_h
+        span.input = safe_state(data[:state_before])
+        span.output = safe_state(data[:state_after])
+        span.metadata = data
+        span.end_time = Time.now.utc
+        Langfuse.update_span(span)
+      rescue => _e
+      end
+
+      def on_node_error(event)
+        return if event.node_name == :__start__
+
+        record = with_records_lock do
+          @records_by_node[event.node_name].pop
+        end
+
+        span = record && record[:span]
+        return unless span
+
+        span.metadata = event.to_h
+        span.end_time = Time.now.utc
+        Langfuse.update_span(span)
+      rescue => _e
+      end
+
+      # LLM lifecycle (called directly by LLM clients)
+      def on_llm_request(data, node_name)
+        record = with_records_lock do
+          stack = @records_by_node[node_name]
+          stack.empty? ? nil : stack[-1]
+        end
+        return unless record && record[:span]
+
+        # Prefer normalized payload from LLMBase implementations (e.g., ChatOpenAI)
+        input_payload = if data.is_a?(Hash)
+          data[:input] || data[:messages] || (data[:request] && data[:request][:messages])
+        else
+          data
+        end
+
+        generation = Langfuse.generation(
+          name: "llm-request-#{node_name}",
+          trace_id: @trace&.id,
+          parent_observation_id: record[:span].id,
+          model: data[:model],
+          input: input_payload,
+          metadata: (data.respond_to?(:to_h) ? data.to_h : data)
+        )
+
+        with_records_lock do
+          record[:generation] = generation
+        end
+      rescue => _e
+      end
+
+      def on_llm_response(data, node_name)
+        record = with_records_lock do
+          stack = @records_by_node[node_name]
+          stack.empty? ? nil : stack[-1]
+        end
+        return unless record && record[:generation]
+
+        generation = record[:generation]
+
+        if data.is_a?(Hash)
+          # Prefer normalized payload keys first
+          if data.key?(:output)
+            generation.output = data[:output]
+          else
+            # Fallback to OpenAI-style response structure
+            generation.output = data.dig(:choices, 0, :message, :content)
+          end
+
+          # Usage: support both normalized top-level and OpenAI usage block
+          prompt_tokens = data[:prompt_tokens] || data.dig(:usage, :prompt_tokens)
+          completion_tokens = data[:completion_tokens] || data.dig(:usage, :completion_tokens)
+          total_tokens = data[:total_tokens] || data.dig(:usage, :total_tokens)
+
+          if prompt_tokens || completion_tokens || total_tokens
+            begin
+              generation.usage = Langfuse::Models::Usage.new(
+                prompt_tokens: prompt_tokens,
+                completion_tokens: completion_tokens,
+                total_tokens: total_tokens
+              )
+            rescue => _e
+              # best-effort usage mapping
+            end
+          end
+        else
+          generation.output = data
+        end
+
+        generation.end_time = Time.now.utc
+        Langfuse.update_generation(generation)
+
+        with_records_lock do
+          record[:generation] = nil
+        end
+      rescue => _e
+      end
+
+      private
+
+      def ensure_trace!(event)
+        return @trace if @trace
+        @trace_mutex.synchronize do
+          return @trace if @trace
+          data = event.to_h
+          @trace = Langfuse.trace(
+            name: @name,
+            thread_id: data[:thread_id],
+            metadata: data,
+            input: safe_state(data[:initial_state])
+          )
+        end
+        @trace
+      end
+
+      def with_records_lock
+        @records_mutex.synchronize do
+          yield
+        end
+      end
+
+      def safe_state(state)
+        return nil if state.nil?
+        if state.respond_to?(:to_h)
+          state.to_h
+        else
+          state
+        end
+      rescue => _e
+        nil
+      end
+    end
+  end
+end
+
+
data/lib/langgraph_rb/tool_definition.rb ADDED
@@ -0,0 +1,97 @@
+require 'json'
+
+module LangGraphRB
+  # Mixin to declare tool functions compatible with OpenAI tool/function calling
+  module ToolDefinition
+    def self.extended(base)
+      base.instance_variable_set(:@__tool_functions, {})
+    end
+
+    def define_function(name, description: "", &block)
+      class_name = self.name
+      fn_name = "#{class_name}__#{name}".to_sym
+      @__tool_functions ||= {}
+      @__tool_functions[fn_name] = {
+        name: fn_name,
+        description: description,
+        parameters: { type: 'object', properties: {}, required: [] }
+      }
+
+      # Evaluate the DSL inside a builder to collect properties
+      if block
+        builder = FunctionSchemaBuilder.new(@__tool_functions[fn_name][:parameters])
+        builder.instance_eval(&block)
+      end
+    end
+
+    def tool_functions
+      @__tool_functions || {}
+    end
+
+    def to_openai_tool_schema
+      # One class may expose multiple functions; return an array of tool entries
+      tool_functions.values.map do |fn|
+        {
+          type: 'function',
+          function: {
+            name: fn[:name].to_s,
+            description: fn[:description],
+            parameters: fn[:parameters]
+          }
+        }
+      end
+    end
+
+    class FunctionSchemaBuilder
+      def initialize(parameters)
+        @parameters = parameters
+      end
+
+      def property(name, type:, description: "", required: false)
+        @parameters[:properties][name.to_sym] = { type: type, description: description }
+        if required
+          @parameters[:required] ||= []
+          @parameters[:required] << name.to_sym
+        end
+      end
+    end
+  end
+
+  # Base class for tools using the ToolDefinition mixin
+  class ToolBase
+    extend ToolDefinition
+
+    def call(call_args)
+      # call_args: { name:, arguments: {} } or OpenAI-like hash
+      name = call_args[:name] || call_args['name']
+      args = call_args[:arguments] || call_args['arguments'] || {}
+      raise ArgumentError, 'Tool call missing name' if name.nil?
+
+      method_name = name.to_sym
+      unless respond_to?(method_name)
+        raise ArgumentError, "Undefined tool function: #{name}"
+      end
+
+      result = public_send(method_name, **symbolize_keys(args))
+      tool_response(result)
+    end
+
+    # Standardize tool responses; can be overridden by subclasses
+    def tool_response(payload)
+      payload
+    end
+
+    def to_openai_tool_schema
+      self.class.to_openai_tool_schema
+    end
+
+    private
+
+    def symbolize_keys(hash)
+      return hash unless hash.is_a?(Hash)
+      hash.transform_keys { |k| k.respond_to?(:to_sym) ? k.to_sym : k }
+    end
+  end
+end
+
+
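A sketch of the DSL defined above, using a hypothetical WeatherTool (not part of the gem). Note that define_function registers the schema under a class-prefixed name (WeatherTool__current_weather), while ToolBase#call dispatches on the bare method name; ToolNode#normalize_tool_call performs that split on '__':

    class WeatherTool < LangGraphRB::ToolBase
      define_function :current_weather, description: "Get the weather for a city" do
        property :city, type: "string", description: "City name", required: true
      end

      def current_weather(city:)
        tool_response({ city: city, forecast: "sunny" })
      end
    end

    tool = WeatherTool.new
    tool.to_openai_tool_schema
    # => [{ type: 'function', function: { name: 'WeatherTool__current_weather', ... } }]
    tool.call({ name: :current_weather, arguments: { 'city' => 'Lima' } })
    # => { city: "Lima", forecast: "sunny" }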
data/lib/langgraph_rb/version.rb CHANGED
@@ -1,3 +1,3 @@
 module LangGraphRB
-  VERSION = "0.1.4"
+  VERSION = "0.1.6"
 end
data/lib/langgraph_rb.rb CHANGED
@@ -9,6 +9,10 @@ require_relative 'langgraph_rb/stores/memory'
 require_relative 'langgraph_rb/observers/base'
 require_relative 'langgraph_rb/observers/logger'
 require_relative 'langgraph_rb/observers/structured'
+require_relative 'langgraph_rb/observers/langfuse'
+require_relative 'langgraph_rb/llm_base'
+require_relative 'langgraph_rb/chat_openai'
+require_relative 'langgraph_rb/tool_definition'
 
 module LangGraphRB
   class Error < StandardError; end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: langgraph_rb
 version: !ruby/object:Gem::Version
-  version: 0.1.4
+  version: 0.1.6
 platform: ruby
 authors:
 - Julian Toro
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2025-09-16 00:00:00.000000000 Z
+date: 2025-09-20 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: json
@@ -24,6 +24,20 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '2.0'
+- !ruby/object:Gem::Dependency
+  name: openai
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.24.0
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.24.0
 - !ruby/object:Gem::Dependency
   name: bundler
   requirement: !ruby/object:Gem::Requirement
@@ -111,6 +125,8 @@ files:
 - SUMMARY.md
 - examples/advanced_example.rb
 - examples/basic_example.rb
+- examples/chat_openai_tools_example.rb
+- examples/conditional_example.rb
 - examples/initial_state_example.rb
 - examples/langfuse_example.rb
 - examples/llmnode_example.rb
@@ -119,16 +135,20 @@ files:
 - examples/simple_test.rb
 - langgraph_rb.gemspec
 - lib/langgraph_rb.rb
+- lib/langgraph_rb/chat_openai.rb
 - lib/langgraph_rb/command.rb
 - lib/langgraph_rb/edge.rb
 - lib/langgraph_rb/graph.rb
+- lib/langgraph_rb/llm_base.rb
 - lib/langgraph_rb/node.rb
 - lib/langgraph_rb/observers/base.rb
+- lib/langgraph_rb/observers/langfuse.rb
 - lib/langgraph_rb/observers/logger.rb
 - lib/langgraph_rb/observers/structured.rb
 - lib/langgraph_rb/runner.rb
 - lib/langgraph_rb/state.rb
 - lib/langgraph_rb/stores/memory.rb
+- lib/langgraph_rb/tool_definition.rb
 - lib/langgraph_rb/version.rb
 - test_runner.rb
 homepage: https://github.com/fulit103/langgraph_rb