langgraph_rb 0.1.6 → 0.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/langgraph_rb.gemspec +0 -1
- data/lib/langgraph_rb/chat_openai.rb +17 -3
- data/lib/langgraph_rb/chat_ruby_openai.rb +171 -0
- data/lib/langgraph_rb/llm_base.rb +10 -0
- data/lib/langgraph_rb/observers/base.rb +4 -0
- data/lib/langgraph_rb/observers/langfuse.rb +18 -0
- data/lib/langgraph_rb/version.rb +1 -1
- data/lib/langgraph_rb.rb +0 -1
- metadata +3 -16
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 6882db2f57746e6a18576eefd6bd26749009361880d1cd858ee91d3d06385abe
+  data.tar.gz: fa195eb1ed6763a3e0b53803792e92e7dd1dd327c9dd9372d7acf7ab3ea49d8f
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: aa652301646c12a2d2ade30faf5d5f463a89190813b9dbccb8c1b8df675bc6e0e8911c3358166224617239f763578b95fe78f27850e11690fc3e7726b85edc4c
+  data.tar.gz: 1a7fa46cf43d02b0c356c58f5e5aaed84023c5c2cece84eaf49ff084627ac046f1dd170f4e9cf2477ed1981d1ca8b7321ad416c9e4610c854793bdcdbf6b990d
data/langgraph_rb.gemspec
CHANGED
data/lib/langgraph_rb/chat_openai.rb
CHANGED

@@ -1,12 +1,20 @@
-require 'openai'
+begin
+  require 'openai'
+rescue LoadError
+  raise "LangGraphRB::ChatOpenAI requires gem 'openai' (~> 0.24). Add it to your Gemfile."
+end
 require_relative 'llm_base'
 
 module LangGraphRB
   # ChatOpenAI wrapper compatible with LLMBase, supporting tool binding
   class ChatOpenAI < LLMBase
-    def initialize(model:, temperature: 0.0, api_key: ENV['OPENAI_API_KEY'])
+    def initialize(model:, temperature: 0.0, api_key: ENV['OPENAI_API_KEY'], client: nil)
       super(model: model, temperature: temperature)
-      @client = OpenAI::Client.new(api_key: api_key)
+      @client = client || OpenAI::Client.new(api_key: api_key)
+
+      unless @client.respond_to?(:chat) && @client.chat.respond_to?(:completions)
+        raise "LangGraphRB::ChatOpenAI expects 'openai' gem ~> 0.24 (client.chat.completions.create)"
+      end
     end
 
     # Returns a new instance with tools bound (non-destructive)

@@ -77,8 +85,14 @@ module LangGraphRB
       else
         text_content
       end
+    rescue => e
+      notify_llm_error({
+        error: e.message
+      })
+      raise e
     end
 
+
     private
 
     def normalize_messages(messages)
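For orientation, a minimal usage sketch of the new `client:` injection point (useful for passing a stub in tests). It assumes the official 'openai' gem (~> 0.24) is installed; the model name is illustrative:

    require 'openai'
    require 'langgraph_rb/chat_openai'  # no longer loaded by `require 'langgraph_rb'` (see below)

    client = OpenAI::Client.new(api_key: ENV['OPENAI_API_KEY'])
    llm = LangGraphRB::ChatOpenAI.new(model: 'gpt-4o-mini', client: client)
    # The constructor now raises unless the client exposes chat.completions,
    # so an incompatible client (e.g. ruby-openai) fails fast, not at call time.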
data/lib/langgraph_rb/chat_ruby_openai.rb
ADDED

@@ -0,0 +1,171 @@
+begin
+  require 'openai'
+rescue LoadError
+  raise "LangGraphRB::ChatRubyOpenAI requires gem 'ruby-openai' (~> 8.1). Add it to your Gemfile."
+end
+
+require_relative 'llm_base'
+
+module LangGraphRB
+  class ChatRubyOpenAI < LLMBase
+    def initialize(model:, temperature: 0.0, api_key: ENV['OPENAI_API_KEY'], client: nil)
+      super(model: model, temperature: temperature)
+      @client = client || OpenAI::Client.new(access_token: api_key)
+    end
+
+    def bind_tools(tools)
+      dup_instance = self.class.new(model: @model, temperature: @temperature)
+      dup_instance.instance_variable_set(:@client, @client)
+      dup_instance.instance_variable_set(:@bound_tools, Array(tools))
+      dup_instance
+    end
+
+    def call(messages, tools: nil)
+      raise ArgumentError, 'messages must be an Array' unless messages.is_a?(Array)
+
+      tool_definitions = (tools || @bound_tools)
+      tool_schemas = Array(tool_definitions).flat_map do |tool|
+        if tool.respond_to?(:to_openai_tool_schema)
+          Array(tool.to_openai_tool_schema)
+        else
+          [tool]
+        end
+      end
+
+      request_payload = {
+        model: @model,
+        temperature: @temperature,
+        messages: normalize_messages(messages)
+      }
+
+      if tool_schemas && !tool_schemas.empty?
+        request_payload[:tools] = tool_schemas
+        request_payload[:tool_choice] = 'auto'
+      end
+
+      notify_llm_request({
+        name: 'OpenAI::ChatCompletion',
+        model: @model,
+        model_parameters: { temperature: @temperature },
+        input: request_payload[:messages]
+      })
+
+      # ruby-openai 8.1.x: client.chat(parameters: {...}) returns a Hash
+      response = @client.chat(parameters: request_payload)
+
+      message = extract_message_from_response(response)
+      tool_calls = message[:tool_calls]
+      text_content = message[:content]
+
+      usage = extract_usage_from_response(response)
+      notify_llm_response({
+        output: tool_calls ? { tool_calls: tool_calls } : text_content,
+        prompt_tokens: usage[:prompt_tokens],
+        completion_tokens: usage[:completion_tokens],
+        total_tokens: usage[:total_tokens]
+      })
+
+      if tool_calls && !tool_calls.empty?
+        normalized_calls = tool_calls.map do |tc|
+          {
+            id: tc[:id],
+            name: tc[:function][:name],
+            arguments: parse_tool_arguments(tc[:function][:arguments])
+          }
+        end
+        { tool_calls: normalized_calls }
+      else
+        text_content
+      end
+    rescue => e
+      notify_llm_error({ error: e.message })
+      raise e
+    end
+
+    private
+
+    def normalize_messages(messages)
+      messages.map do |m|
+        role = (m[:role] || m['role'])
+        content = m[:content] || m['content']
+
+        normalized = { role: role }
+
+        if content.is_a?(Array)
+          normalized[:content] = content
+        elsif content.nil?
+          normalized[:content] = nil
+        else
+          normalized[:content] = content.to_s
+        end
+
+        tool_calls = m[:tool_calls] || m['tool_calls']
+        if tool_calls && role.to_s == 'assistant'
+          normalized[:tool_calls] = Array(tool_calls).map do |tc|
+            if tc[:function] || tc['function']
+              fn = tc[:function] || tc['function']
+              raw_args = fn[:arguments] || fn['arguments']
+              args_str = raw_args.is_a?(String) ? raw_args : JSON.dump(raw_args || {})
+              {
+                id: (tc[:id] || tc['id']),
+                type: 'function',
+                function: {
+                  name: (fn[:name] || fn['name']).to_s,
+                  arguments: args_str
+                }
+              }
+            else
+              raw_args = tc[:arguments] || tc['arguments']
+              args_str = raw_args.is_a?(String) ? raw_args : JSON.dump(raw_args || {})
+              {
+                id: (tc[:id] || tc['id']),
+                type: 'function',
+                function: {
+                  name: (tc[:name] || tc['name']).to_s,
+                  arguments: args_str
+                }
+              }
+            end
+          end
+        end
+
+        if role.to_s == 'tool'
+          tool_call_id = m[:tool_call_id] || m['tool_call_id']
+          name = m[:name] || m['name']
+          normalized[:tool_call_id] = tool_call_id if tool_call_id
+          normalized[:name] = name if name
+        end
+
+        normalized
+      end
+    end
+
+    def parse_tool_arguments(raw)
+      return {} if raw.nil?
+      case raw
+      when String
+        JSON.parse(raw) rescue {}
+      when Hash
+        raw
+      else
+        {}
+      end
+    end
+
+    def extract_message_from_response(response)
+      (response['choices'] || []).dig(0, 'message') || {}
+    end
+
+    def extract_usage_from_response(response)
+      usage = response['usage']
+      return { prompt_tokens: nil, completion_tokens: nil, total_tokens: nil } unless usage
+      {
+        prompt_tokens: usage['prompt_tokens'],
+        completion_tokens: usage['completion_tokens'],
+        total_tokens: usage['total_tokens']
+      }
+    end
+  end
+end
+
+
data/lib/langgraph_rb/llm_base.rb
CHANGED

@@ -54,6 +54,16 @@ module LangGraphRB
         end
       end
     end
+
+    def notify_llm_error(payload)
+      @observers.each do |observer|
+        begin
+          observer.on_llm_error(payload, @node_name)
+        rescue => _e
+          # Ignore observer errors
+        end
+      end
+    end
   end
 end
 
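Observers receive the new hook as `on_llm_error(payload, node_name)`, and `notify_llm_error` swallows any exception an observer raises. A sketch of a minimal custom observer (the class name and logging are illustrative; only the hook signature and the `:error` payload key come from this diff):

    class ErrorLoggingObserver
      def on_llm_error(payload, node_name)
        warn "[#{node_name}] LLM error: #{payload[:error]}"
      end
    end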
data/lib/langgraph_rb/observers/langfuse.rb
CHANGED

@@ -165,6 +165,24 @@ module LangGraphRB
     rescue => _e
     end
 
+    def on_llm_error(data, node_name)
+      record = with_records_lock do
+        stack = @records_by_node[node_name]
+        stack.empty? ? nil : stack[-1]
+      end
+      return unless record && record[:generation]
+
+      generation = record[:generation]
+      generation.output = data[:error]
+      generation.end_time = Time.now.utc
+      Langfuse.update_generation(generation)
+
+      with_records_lock do
+        record[:generation] = nil
+      end
+    rescue => _e
+    end
+
     private
 
     def ensure_trace!(event)
data/lib/langgraph_rb/version.rb
CHANGED
-  VERSION = '0.1.6'
+  VERSION = '0.1.8'
data/lib/langgraph_rb.rb
CHANGED
@@ -11,7 +11,6 @@ require_relative 'langgraph_rb/observers/logger'
 require_relative 'langgraph_rb/observers/structured'
 require_relative 'langgraph_rb/observers/langfuse'
 require_relative 'langgraph_rb/llm_base'
-require_relative 'langgraph_rb/chat_openai'
 require_relative 'langgraph_rb/tool_definition'
 
 module LangGraphRB
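With this require removed, applications opt into a backend explicitly; a sketch:

    require 'langgraph_rb'
    require 'langgraph_rb/chat_openai'            # official 'openai' gem backend
    # or: require 'langgraph_rb/chat_ruby_openai' # 'ruby-openai' gem backend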
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: langgraph_rb
 version: !ruby/object:Gem::Version
-  version: 0.1.
+  version: 0.1.8
 platform: ruby
 authors:
 - Julian Toro
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2025-
+date: 2025-10-08 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: json

@@ -24,20 +24,6 @@ dependencies:
   - - "~>"
     - !ruby/object:Gem::Version
       version: '2.0'
-- !ruby/object:Gem::Dependency
-  name: openai
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: 0.24.0
-  type: :runtime
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: 0.24.0
 - !ruby/object:Gem::Dependency
   name: bundler
   requirement: !ruby/object:Gem::Requirement

@@ -136,6 +122,7 @@ files:
 - langgraph_rb.gemspec
 - lib/langgraph_rb.rb
 - lib/langgraph_rb/chat_openai.rb
+- lib/langgraph_rb/chat_ruby_openai.rb
 - lib/langgraph_rb/command.rb
 - lib/langgraph_rb/edge.rb
 - lib/langgraph_rb/graph.rb