langgraph_rb 0.1.2 → 0.1.4

This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: b3189c15397a79e8ae6eb273adc89aa9148cdd82a6dfbaab22bd6ee0367c02cd
-   data.tar.gz: ef3b7472a7b8aea84d2dc8a80e4647a4fda9d1346169e304c16357d507b02b84
+   metadata.gz: f5649cc9cef30c96380dc2ccd9a5c576af8d3efd7e02552b03d81cb45360f408
+   data.tar.gz: 3c706c127ac2fbd5e9d8a0be723575228a21d9ff91672169dc66174cd8dba3c5
  SHA512:
-   metadata.gz: 8084cb63ceacdf4d207daa068a458efa596016232457d863f261533ba3a29de7c88bfc3bdca73a7fcac3312521091cccc61ecd1e99b2df72bfc8ae9b23c0d769
-   data.tar.gz: 1b2315e962fa8204b4eaa487304492435b866ff20102444f9e4ff76dc8675461131352554e311e4b32aa5106331d55d5cba3d38ff9081021e95196a8c71a2858
+   metadata.gz: 7dd32ea2d6c98ae356596cee6ca0d697802e7107649faf63c598606f9b708a68cd7c72e72d4ad5423d136a72d74c242108c28b670f6ab2aea2cf3a18925f35d4
+   data.tar.gz: d6a30010935f797cc6ebbe3767db38c8726029956192b57cd69304d6b0238d3dd0bdb1b32d7d9b348bbc8ee4469f8b1fe62b750386834c9cb7ce52980f1f25a4
data/.gitignore CHANGED
@@ -7,4 +7,5 @@ vendor/bundle/
  .ruby-version
  .byebug_history
  coverage/
- *.log
+ *.log
+ .env
data/Gemfile CHANGED
@@ -6,4 +6,5 @@ group :development, :test do
    gem 'rspec', '~> 3.0'
    gem 'pry', '~> 0.14'
    gem 'rubocop', '~> 1.0'
+   gem 'langfuse', '~> 0.1'
  end
data/Gemfile.lock ADDED
@@ -0,0 +1,80 @@
+ PATH
+   remote: .
+   specs:
+     langgraph_rb (0.1.2)
+       json (~> 2.0)
+
+ GEM
+   remote: https://rubygems.org/
+   specs:
+     ast (2.4.3)
+     coderay (1.1.3)
+     concurrent-ruby (1.3.5)
+     diff-lcs (1.6.2)
+     json (2.13.2)
+     langfuse (0.1.1)
+       concurrent-ruby (~> 1.2)
+       sorbet-runtime (~> 0.5)
+     language_server-protocol (3.17.0.5)
+     lint_roller (1.1.0)
+     method_source (1.1.0)
+     parallel (1.27.0)
+     parser (3.3.9.0)
+       ast (~> 2.4.1)
+       racc
+     prism (1.4.0)
+     pry (0.15.2)
+       coderay (~> 1.1)
+       method_source (~> 1.0)
+     racc (1.8.1)
+     rainbow (3.1.1)
+     rake (13.3.0)
+     regexp_parser (2.11.1)
+     rspec (3.13.1)
+       rspec-core (~> 3.13.0)
+       rspec-expectations (~> 3.13.0)
+       rspec-mocks (~> 3.13.0)
+     rspec-core (3.13.5)
+       rspec-support (~> 3.13.0)
+     rspec-expectations (3.13.5)
+       diff-lcs (>= 1.2.0, < 2.0)
+       rspec-support (~> 3.13.0)
+     rspec-mocks (3.13.5)
+       diff-lcs (>= 1.2.0, < 2.0)
+       rspec-support (~> 3.13.0)
+     rspec-support (3.13.4)
+     rubocop (1.79.2)
+       json (~> 2.3)
+       language_server-protocol (~> 3.17.0.2)
+       lint_roller (~> 1.1.0)
+       parallel (~> 1.10)
+       parser (>= 3.3.0.2)
+       rainbow (>= 2.2.2, < 4.0)
+       regexp_parser (>= 2.9.3, < 3.0)
+       rubocop-ast (>= 1.46.0, < 2.0)
+       ruby-progressbar (~> 1.7)
+       unicode-display_width (>= 2.4.0, < 4.0)
+     rubocop-ast (1.46.0)
+       parser (>= 3.3.7.2)
+       prism (~> 1.4)
+     ruby-progressbar (1.13.0)
+     sorbet-runtime (0.6.12534)
+     unicode-display_width (3.1.4)
+       unicode-emoji (~> 4.0, >= 4.0.4)
+     unicode-emoji (4.0.4)
+
+ PLATFORMS
+   arm64-darwin-22
+   ruby
+
+ DEPENDENCIES
+   bundler (~> 2.0)
+   langfuse (~> 0.1)
+   langgraph_rb!
+   pry (~> 0.14)
+   rake (~> 13.0)
+   rspec (~> 3.0)
+   rubocop (~> 1.0)
+
+ BUNDLED WITH
+    2.6.7
data/examples/langfuse_example.rb ADDED
@@ -0,0 +1,94 @@
+ #!/usr/bin/env ruby
+ require 'langfuse'
+ require_relative '../lib/langgraph_rb'
+
+ url = 'https://us.cloud.langfuse.com'
+
+ Langfuse.configure do |config|
+   config.public_key = ENV['LANGFUSE_PUBLIC_KEY'] # e.g., 'pk-lf-...'
+   config.secret_key = ENV['LANGFUSE_SECRET_KEY'] # e.g., 'sk-lf-...'
+   config.host = url
+   config.debug = true # Enable debug logging
+ end
+
+
+ class LangfuseObserver < LangGraphRB::Observers::BaseObserver
+
+   def on_graph_start(event)
+     @trace ||= Langfuse.trace(
+       name: "graph-start2",
+       thread_id: event.thread_id,
+       metadata: event.to_h
+     )
+   end
+
+   def on_node_end(event)
+     span = Langfuse.span(
+       name: "node-#{event.node_name}",
+       trace_id: @trace.id,
+       input: event.to_h,
+     )
+     Langfuse.update_span(span)
+   end
+ end
+
+
+ def langfuse_example
+   puts "########################################################"
+   puts "########################################################"
+   puts "########################################################"
+   puts "=== Langfuse Example ==="
+
+   # Create a simple graph for demonstration
+
+   graph = LangGraphRB::Graph.new(state_class: LangGraphRB::State) do
+     node :process_message do |state|
+       sleep(Random.rand(0.1..0.5))
+       { message: "Processed: #{state[:message]}" }
+     end
+
+     conditional_edge :process_message, -> (state) {
+       sleep(Random.rand(0.1..0.5))
+       if state[:value] > 0 and state[:value] < 10
+         puts "Processed between 0 and 10"
+         return :process_between_0_and_10
+       elsif state[:value] > 10
+         puts "Processed greater than 10"
+         return :process_greater_than_10
+       else
+         puts "Processed less than 0"
+         return :process_less_than_0
+       end
+     }
+
+     node :process_between_0_and_10 do |state|
+       { message: "Processed between 0 and 10: #{state[:message]}" }
+     end
+
+     node :process_greater_than_10 do |state|
+       { message: "Processed greater than 10: #{state[:message]}" }
+     end
+
+     node :process_less_than_0 do |state|
+       { message: "Processed less than 0: #{state[:message]}" }
+     end
+
+     set_entry_point :process_message
+     set_finish_point :process_between_0_and_10
+     set_finish_point :process_greater_than_10
+     set_finish_point :process_less_than_0
+   end
+
+
+
+   graph.compile!
+   result = graph.invoke({ message: "Hello World", value: 31}, observers: [LangfuseObserver.new])
+   puts "Result: #{result}"
+   puts "########################################################"
+   puts "########################################################"
+   puts "########################################################"
+   puts "########################################################"
+ end
+
+ langfuse_example
+
data/examples/llmnode_example.rb ADDED
@@ -0,0 +1,210 @@
+ #!/usr/bin/env ruby
+ require 'langfuse'
+ require_relative '../lib/langgraph_rb'
+
+ url = 'https://us.cloud.langfuse.com'
+
+ puts "LANGFUSE_PUBLIC_KEY: #{ENV['LANGFUSE_PUBLIC_KEY']}"
+ puts "LANGFUSE_SECRET_KEY: #{ENV['LANGFUSE_SECRET_KEY']}"
+ puts "LANGFUSE_HOST: #{url}"
+ puts "LANGFUSE_DEBUG: #{true}"
+
+ Langfuse.configure do |config|
+   config.public_key = ENV['LANGFUSE_PUBLIC_KEY'] # e.g., 'pk-lf-...'
+   config.secret_key = ENV['LANGFUSE_SECRET_KEY'] # e.g., 'sk-lf-...'
+   config.host = url
+   config.debug = true # Enable debug logging
+ end
+
+ # Very simple mock LLM client. Bring your own real client instead.
+ class MockLLMClient
+
+   def set_observers(observers, node_name)
+     @observers = observers
+     @node_name = node_name
+   end
+
+   def call(messages)
+
+     data = {
+       name: "MockLLMClient",
+       model: "MockLLM",
+       model_parameters: {
+         temperature: 0.5,
+         max_tokens: 1000
+       },
+       input: messages,
+     }
+
+     log_llm_request(data)
+
+     last_user_message = messages.reverse.find { |m| m[:role] == 'user' }&.dig(:content)
+     "(mock) You said: #{last_user_message}"
+
+     data = {
+       output: "(mock) You said: #{last_user_message}",
+       prompt_tokens: 100,
+       completion_tokens: 100,
+       total_tokens: 200,
+     }
+
+     log_llm_response(data)
+   end
+
+   def log_llm_request(data)
+     @observers&.each do |observer|
+       observer.on_llm_request(data, @node_name)
+     end
+   end
+
+   def log_llm_response(data)
+     @observers&.each do |observer|
+       observer.on_llm_response(data, @node_name)
+     end
+   end
+ end
+
+ class LangfuseObserver < LangGraphRB::Observers::BaseObserver
+
+   def initialize
+     @trace = nil
+     @spans_by_node = {}
+   end
+
+   def on_graph_start(event)
+     @trace ||= Langfuse.trace(
+       name: "llm-graph",
+       thread_id: event.thread_id,
+       metadata: event.to_h
+     )
+   end
+
+   def on_node_start(event)
+     @spans_by_node[event.node_name] ||= {
+       span: Langfuse.span(
+         name: "node-#{event.node_name}",
+         trace_id: @trace.id,
+         input: event.to_h,
+       ),
+       generation: nil
+     }
+     Langfuse.update_span(@spans_by_node[event.node_name][:span])
+   end
+
+   def on_node_end(event)
+     # @spans_by_node[event.node_name] ||= {
+     #   span: Langfuse.span(
+     #     name: "node-#{event.node_name}",
+     #     trace_id: @trace.id,
+     #     input: event.to_h,
+     #   ),
+     #   generation: nil
+     # }
+     # Langfuse.update_span(@spans_by_node[event.node_name][:span])
+   end
+
+   def on_llm_request(event, node_name)
+     puts "########################################################"
+     puts "on_llm_request: #{event}"
+     puts "node_name: #{node_name}"
+     puts "spans_by_node: #{@spans_by_node}"
+     puts "$$$$--------------------------------------------------------$$$$"
+     span = @spans_by_node[node_name][:span]
+     generation = Langfuse.generation(
+       name: event[:name],
+       trace_id: @trace.id,
+       parent_observation_id: span.id,
+       model: event[:model],
+       model_parameters: event[:model_parameters],
+       input: event[:input]
+     )
+
+     @spans_by_node[node_name.to_sym][:generation] = generation
+   end
+
+   def on_llm_response(event, node_name)
+     puts "########################################################"
+     puts "on_llm_response: #{event}"
+     puts "node_name: #{node_name}"
+     puts "spans_by_node: #{@spans_by_node}"
+     puts "$$$$--------------------------------------------------------$$$$"
+
+     generation = @spans_by_node[node_name][:generation]
+
+     return if generation.nil?
+
+     generation.output = event[:output]
+     generation.usage = Langfuse::Models::Usage.new(
+       prompt_tokens: event[:prompt_tokens],
+       completion_tokens: event[:completion_tokens],
+       total_tokens: event[:total_tokens]
+     )
+     Langfuse.update_generation(generation)
+
+     @spans_by_node[node_name.to_sym][:generation] = nil
+   end
+ end
+
+ def llmnode_example
+   puts "=== LLMNode Example ==="
+
+   mock_llm = MockLLMClient.new
+
+   # Build a minimal chat graph using an LLM node.
+   graph = LangGraphRB::Graph.new(state_class: LangGraphRB::State) do
+     # Collect user input into the message history
+     node :receive_input do |state|
+       user_msg = { role: 'user', content: state[:input].to_s }
+       existing = state[:messages] || []
+       { messages: existing + [user_msg], last_user_message: state[:input].to_s }
+     end
+
+     # LLM node – uses a custom block to call the provided client via context
+     # Note: The default LLM behavior can be used once the core library wires a default callable.
+     llm_node :chat, llm_client: mock_llm, system_prompt: "You are a helpful assistant." do |state, context|
+       messages = state[:messages] || []
+
+       puts "########################################################"
+       puts "########################################################"
+
+       puts "context: #{context}"
+
+       puts "########################################################"
+       puts "########################################################"
+
+       # Optionally prepend a system prompt
+       if context[:system_prompt]
+         messages = [{ role: 'system', content: context[:system_prompt] }] + messages
+       end
+
+       response = context[:llm_client].call(messages)
+
+       assistant_msg = { role: 'assistant', content: response }
+       { messages: (state[:messages] || []) + [assistant_msg], last_response: response }
+     end
+
+     set_entry_point :receive_input
+     edge :receive_input, :chat
+     set_finish_point :chat
+   end
+
+   graph.compile!
+
+   # Single-turn example
+   result = graph.invoke({ messages: [], input: "Hello there!" }, observers: [LangfuseObserver.new])
+
+   puts "Assistant: #{result[:last_response]}"
+   puts "Messages:"
+   (result[:messages] || []).each { |m| puts " - #{m[:role]}: #{m[:content]}" }
+
+   # Multi-turn example (reuse message history)
+   second = graph.invoke({ messages: result[:messages], input: "What's the weather like?" })
+
+   puts "\nAssistant (turn 2): #{second[:last_response]}"
+   puts "Messages (after 2 turns):"
+   (second[:messages] || []).each { |m| puts " - #{m[:role]}: #{m[:content]}" }
+ end
+
+ llmnode_example
+
+
@@ -11,7 +11,7 @@ module LangGraphRB
 
      # Execute the node with the given state and context
      # Returns either a Hash (state delta), Command, or Send object
-     def call(state, context: nil)
+     def call(state, context: nil, observers: [])
        case @callable.arity
        when 0
          @callable.call
@@ -40,27 +40,47 @@ module LangGraphRB
      def initialize(name, llm_client:, system_prompt: nil, &block)
        @llm_client = llm_client
        @system_prompt = system_prompt
-
-       super(name, &block)
+
+       # Use default LLM behavior if no custom block provided
+       super(name, &(block || method(:default_llm_call)))
      end
 
-     def call(state, context: nil)
-       # If no custom block provided, use default LLM behavior
-       if @callable.nil? || @callable == method(:default_llm_call)
-         default_llm_call(state, context)
+     def call(state, context: nil, observers: [])
+       # Auto-inject LLM config into the context for both default and custom blocks
+       merged_context = (context || {}).merge(
+         llm_client: @llm_client,
+         system_prompt: @system_prompt
+       )
+
+       begin
+         @llm_client&.set_observers(observers, @name) if observers.any?
+       rescue => e
+         raise NodeError, "Error setting observers for LLM client: #{e.message}"
+       end
+
+       # Delegate to Node's dispatcher so arity (0/1/2) is handled uniformly
+       case @callable.arity
+       when 0
+         @callable.call
+       when 1
+         @callable.call(state)
        else
-         super(state, context: context)
+         @callable.call(state, merged_context)
        end
+     rescue => e
+       raise NodeError, "Error executing node '#{@name}': #{e.message}"
      end
 
      private
 
      def default_llm_call(state, context)
        messages = state[:messages] || []
-       messages = [@system_prompt] + messages if @system_prompt && !messages.empty?
-
-       response = @llm_client.call(messages)
-
+       if context && context[:system_prompt]
+         messages = [{ role: 'system', content: context[:system_prompt] }] + messages
+       end
+
+       response = (context[:llm_client] || @llm_client).call(messages)
+
        {
          messages: [{ role: 'assistant', content: response }],
          last_response: response
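
To make the reworked LLMNode#call above concrete: any object that responds to call(messages) can serve as the llm_client, and a two-argument block is dispatched as @callable.call(state, merged_context), so :llm_client and :system_prompt arrive in the context automatically. A minimal sketch, not part of this release's files; the stub lambda is a hypothetical stand-in for a real client:

require 'langgraph_rb'

# Hypothetical stand-in client: anything responding to #call(messages) works
# here, because with no observers passed, set_observers is never invoked.
stub_llm = ->(messages) { "echo: #{messages.last[:content]}" }

graph = LangGraphRB::Graph.new(state_class: LangGraphRB::State) do
  # Arity-2 block: receives (state, merged_context) from LLMNode#call.
  llm_node :chat, llm_client: stub_llm, system_prompt: "Be brief." do |state, context|
    messages = [{ role: 'system', content: context[:system_prompt] }] +
               (state[:messages] || [])
    { last_response: context[:llm_client].call(messages) }
  end

  set_entry_point :chat
  set_finish_point :chat
end

graph.compile!
puts graph.invoke({ messages: [{ role: 'user', content: 'hi' }] })[:last_response]
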
@@ -60,6 +60,16 @@ module LangGraphRB
        # Override in subclasses if cleanup needed
      end
 
+     # Called when LLM requests occur
+     def on_llm_request(event)
+       # Override in subclasses
+     end
+
+     # Called when LLM responses occur
+     def on_llm_response(event)
+       # Override in subclasses
+     end
+
      protected
 
      # Helper method to create standardized event structure
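
These two hooks pair with the observer plumbing in the next hunk: the runner passes its observers into node.call, and an LLM node hands them to its client, which is expected to forward request and response payloads back to each observer. One wrinkle worth noting: BaseObserver declares the hooks with a single event parameter, while the MockLLMClient in the bundled example invokes them as on_llm_request(data, node_name); an observer written for that client should accept both arguments, as the example's LangfuseObserver does. A minimal sketch against the two-argument convention (the class name is hypothetical):

require 'langgraph_rb'

# Hypothetical observer that logs LLM traffic to stdout. It follows the
# two-argument (payload, node_name) convention used by the example client,
# overriding the one-argument signatures declared on BaseObserver.
class ConsoleLLMObserver < LangGraphRB::Observers::BaseObserver
  def on_llm_request(event, node_name)
    puts "[#{node_name}] request to #{event[:model]}: #{event[:input].inspect}"
  end

  def on_llm_response(event, node_name)
    puts "[#{node_name}] response: #{event[:output]} (#{event[:total_tokens]} tokens)"
  end
end

# Usage mirrors the bundled examples: observers ride along on invoke.
# graph.invoke(initial_state, observers: [ConsoleLLMObserver.new])
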
@@ -275,7 +275,7 @@ module LangGraphRB
 
        start_time = Time.now
        begin
-         result = node.call(state, context: context)
+         result = node.call(state, context: context, observers: @observers)
          duration = Time.now - start_time
 
          processed_result = process_node_result(node.name, state, result, step)
@@ -1,3 +1,3 @@
  module LangGraphRB
-   VERSION = "0.1.2"
+   VERSION = "0.1.4"
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: langgraph_rb
  version: !ruby/object:Gem::Version
-   version: 0.1.2
+   version: 0.1.4
  platform: ruby
  authors:
  - Julian Toro
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2025-08-06 00:00:00.000000000 Z
+ date: 2025-09-16 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: json
@@ -106,11 +106,14 @@ extra_rdoc_files: []
  files:
  - ".gitignore"
  - Gemfile
+ - Gemfile.lock
  - README.md
  - SUMMARY.md
  - examples/advanced_example.rb
  - examples/basic_example.rb
  - examples/initial_state_example.rb
+ - examples/langfuse_example.rb
+ - examples/llmnode_example.rb
  - examples/observer_example.rb
  - examples/reducers_example.rb
  - examples/simple_test.rb