langgraph_rb 0.1.7 → 0.1.8
This diff shows the content of publicly released package versions as they appear in their public registries, and is provided for informational purposes only.
- checksums.yaml +4 -4
- data/examples/chat_openai_tools_example.rb +2 -6
- data/langgraph_rb.gemspec +0 -1
- data/lib/langgraph_rb/chat_openai.rb +11 -3
- data/lib/langgraph_rb/chat_ruby_openai.rb +171 -0
- data/lib/langgraph_rb/graph.rb +2 -2
- data/lib/langgraph_rb/node.rb +3 -7
- data/lib/langgraph_rb/version.rb +1 -1
- data/lib/langgraph_rb.rb +0 -1
- metadata +2 -15
checksums.yaml
CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 6882db2f57746e6a18576eefd6bd26749009361880d1cd858ee91d3d06385abe
+  data.tar.gz: fa195eb1ed6763a3e0b53803792e92e7dd1dd327c9dd9372d7acf7ab3ea49d8f
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: aa652301646c12a2d2ade30faf5d5f463a89190813b9dbccb8c1b8df675bc6e0e8911c3358166224617239f763578b95fe78f27850e11690fc3e7726b85edc4c
+  data.tar.gz: 1a7fa46cf43d02b0c356c58f5e5aaed84023c5c2cece84eaf49ff084627ac046f1dd170f4e9cf2477ed1981d1ca8b7321ad416c9e4610c854793bdcdbf6b990d
data/examples/chat_openai_tools_example.rb
CHANGED

@@ -38,7 +38,7 @@ end
 def run_chat_openai_tools
   tools = [MovieInfoTool.new(api_key: ENV['TMDB_API_KEY'] || 'demo')]
 
-  chat = LangGraphRB::ChatOpenAI.new(model: ENV.fetch('OPENAI_MODEL', 'gpt-4o-
+  chat = LangGraphRB::ChatOpenAI.new(model: ENV.fetch('OPENAI_MODEL', 'gpt-4o-mini'), temperature: 0)
   chat = chat.bind_tools(tools)
 
   observers = [LangGraphRB::Observers::LangfuseObserver.new(name: 'chat-openai-tools-example')]
@@ -50,11 +50,7 @@ def run_chat_openai_tools
       { messages: existing + [user_msg] }
     end
 
-
-    add_message_callback = ->(message) { puts "New message: #{message}" }
-    sys_prompt = "You are a movie assistant. Use tools when helpful."
-
-    llm_node :chat, llm_client: chat, system_prompt: sys_prompt, add_message_callback: add_message_callback
+    llm_node :chat, llm_client: chat, system_prompt: "You are a movie assistant. Use tools when helpful."
 
     tool_node :tool, tools: tools
 
data/langgraph_rb.gemspec
CHANGED

data/lib/langgraph_rb/chat_openai.rb
CHANGED

@@ -1,12 +1,20 @@
-require 'openai'
+begin
+  require 'openai'
+rescue LoadError
+  raise "LangGraphRB::ChatOpenAI requires gem 'openai' (~> 0.24). Add it to your Gemfile."
+end
 require_relative 'llm_base'
 
 module LangGraphRB
   # ChatOpenAI wrapper compatible with LLMBase, supporting tool binding
   class ChatOpenAI < LLMBase
-    def initialize(model:, temperature: 0.0, api_key: ENV['OPENAI_API_KEY'])
+    def initialize(model:, temperature: 0.0, api_key: ENV['OPENAI_API_KEY'], client: nil)
       super(model: model, temperature: temperature)
-      @client = OpenAI::Client.new(api_key: api_key)
+      @client = client || OpenAI::Client.new(api_key: api_key)
+
+      unless @client.respond_to?(:chat) && @client.chat.respond_to?(:completions)
+        raise "LangGraphRB::ChatOpenAI expects 'openai' gem ~> 0.24 (client.chat.completions.create)"
+      end
     end
 
     # Returns a new instance with tools bound (non-destructive)
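The new client: keyword makes the wrapper testable: anything that satisfies the respond_to? guard above can be injected. A minimal sketch; the Fake* class names and the canned response shape are assumptions invented for this example, not part of the gem:

require 'langgraph_rb'
require 'langgraph_rb/chat_openai' # no longer auto-required by lib/langgraph_rb.rb (see below)

# Hypothetical stand-ins for the openai gem's client.chat.completions interface.
class FakeCompletions
  def create(**_params)
    # Canned payload; the exact shape ChatOpenAI#call expects is not shown in this diff.
    { 'choices' => [{ 'message' => { 'content' => 'stubbed reply' } }] }
  end
end

class FakeChat
  def completions
    FakeCompletions.new
  end
end

class FakeOpenAIClient
  def chat
    FakeChat.new
  end
end

# Passes the duck-type check; the 'openai' gem must still be installed for the
# require above to succeed, but no API key or network call is made.
chat = LangGraphRB::ChatOpenAI.new(model: 'gpt-4o-mini', client: FakeOpenAIClient.new)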
data/lib/langgraph_rb/chat_ruby_openai.rb
ADDED

@@ -0,0 +1,171 @@
+begin
+  require 'openai'
+rescue LoadError
+  raise "LangGraphRB::ChatRubyOpenAI requires gem 'ruby-openai' (~> 8.1). Add it to your Gemfile."
+end
+
+require_relative 'llm_base'
+
+module LangGraphRB
+  class ChatRubyOpenAI < LLMBase
+    def initialize(model:, temperature: 0.0, api_key: ENV['OPENAI_API_KEY'], client: nil)
+      super(model: model, temperature: temperature)
+      @client = client || OpenAI::Client.new(access_token: api_key)
+    end
+
+    def bind_tools(tools)
+      dup_instance = self.class.new(model: @model, temperature: @temperature)
+      dup_instance.instance_variable_set(:@client, @client)
+      dup_instance.instance_variable_set(:@bound_tools, Array(tools))
+      dup_instance
+    end
+
+    def call(messages, tools: nil)
+      raise ArgumentError, 'messages must be an Array' unless messages.is_a?(Array)
+
+      tool_definitions = (tools || @bound_tools)
+      tool_schemas = Array(tool_definitions).flat_map do |tool|
+        if tool.respond_to?(:to_openai_tool_schema)
+          Array(tool.to_openai_tool_schema)
+        else
+          [tool]
+        end
+      end
+
+      request_payload = {
+        model: @model,
+        temperature: @temperature,
+        messages: normalize_messages(messages)
+      }
+
+      if tool_schemas && !tool_schemas.empty?
+        request_payload[:tools] = tool_schemas
+        request_payload[:tool_choice] = 'auto'
+      end
+
+      notify_llm_request({
+        name: 'OpenAI::ChatCompletion',
+        model: @model,
+        model_parameters: { temperature: @temperature },
+        input: request_payload[:messages]
+      })
+
+      # ruby-openai 8.1.x: client.chat(parameters: {...}) returns a Hash
+      response = @client.chat(parameters: request_payload)
+
+      message = extract_message_from_response(response)
+      tool_calls = message[:tool_calls]
+      text_content = message[:content]
+
+      usage = extract_usage_from_response(response)
+      notify_llm_response({
+        output: tool_calls ? { tool_calls: tool_calls } : text_content,
+        prompt_tokens: usage[:prompt_tokens],
+        completion_tokens: usage[:completion_tokens],
+        total_tokens: usage[:total_tokens]
+      })
+
+      if tool_calls && !tool_calls.empty?
+        normalized_calls = tool_calls.map do |tc|
+          {
+            id: tc[:id],
+            name: tc[:function][:name],
+            arguments: parse_tool_arguments(tc[:function][:arguments])
+          }
+        end
+        { tool_calls: normalized_calls }
+      else
+        text_content
+      end
+    rescue => e
+      notify_llm_error({ error: e.message })
+      raise e
+    end
+
+    private
+
+    def normalize_messages(messages)
+      messages.map do |m|
+        role = (m[:role] || m['role'])
+        content = m[:content] || m['content']
+
+        normalized = { role: role }
+
+        if content.is_a?(Array)
+          normalized[:content] = content
+        elsif content.nil?
+          normalized[:content] = nil
+        else
+          normalized[:content] = content.to_s
+        end
+
+        tool_calls = m[:tool_calls] || m['tool_calls']
+        if tool_calls && role.to_s == 'assistant'
+          normalized[:tool_calls] = Array(tool_calls).map do |tc|
+            if tc[:function] || tc['function']
+              fn = tc[:function] || tc['function']
+              raw_args = fn[:arguments] || fn['arguments']
+              args_str = raw_args.is_a?(String) ? raw_args : JSON.dump(raw_args || {})
+              {
+                id: (tc[:id] || tc['id']),
+                type: 'function',
+                function: {
+                  name: (fn[:name] || fn['name']).to_s,
+                  arguments: args_str
+                }
+              }
+            else
+              raw_args = tc[:arguments] || tc['arguments']
+              args_str = raw_args.is_a?(String) ? raw_args : JSON.dump(raw_args || {})
+              {
+                id: (tc[:id] || tc['id']),
+                type: 'function',
+                function: {
+                  name: (tc[:name] || tc['name']).to_s,
+                  arguments: args_str
+                }
+              }
+            end
+          end
+        end
+
+        if role.to_s == 'tool'
+          tool_call_id = m[:tool_call_id] || m['tool_call_id']
+          name = m[:name] || m['name']
+          normalized[:tool_call_id] = tool_call_id if tool_call_id
+          normalized[:name] = name if name
+        end
+
+        normalized
+      end
+    end
+
+    def parse_tool_arguments(raw)
+      return {} if raw.nil?
+      case raw
+      when String
+        JSON.parse(raw) rescue {}
+      when Hash
+        raw
+      else
+        {}
+      end
+    end
+
+    def extract_message_from_response(response)
+      (response['choices'] || []).dig(0, 'message') || {}
+    end
+
+    def extract_usage_from_response(response)
+      usage = response['usage']
+      return { prompt_tokens: nil, completion_tokens: nil, total_tokens: nil } unless usage
+      {
+        prompt_tokens: usage['prompt_tokens'],
+        completion_tokens: usage['completion_tokens'],
+        total_tokens: usage['total_tokens']
+      }
+    end
+  end
+end
+
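ChatRubyOpenAI mirrors ChatOpenAI but targets the ruby-openai client API (client.chat(parameters: ...)). A short usage sketch based only on the code above; since lib/langgraph_rb.rb does not require this file, the explicit require is part of the sketch:

require 'langgraph_rb'
require 'langgraph_rb/chat_ruby_openai'

chat = LangGraphRB::ChatRubyOpenAI.new(
  model: ENV.fetch('OPENAI_MODEL', 'gpt-4o-mini'),
  temperature: 0
)

# With no tools bound, #call returns the message content.
puts chat.call([{ role: 'user', content: 'Say hello in one word.' }])

# With tools bound, a tool-calling turn instead returns { tool_calls: [...] },
# each entry normalized to { id:, name:, arguments: } by #call above.
# chat = chat.bind_tools(tools)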
data/lib/langgraph_rb/graph.rb
CHANGED

@@ -35,11 +35,11 @@ module LangGraphRB
       end
     end
 
-    def llm_node(name, llm_client:, system_prompt: nil,
+    def llm_node(name, llm_client:, system_prompt: nil, &block)
      name = name.to_sym
      raise GraphError, "Node '#{name}' already exists" if @nodes.key?(name)
 
-      @nodes[name] = LLMNode.new(name, llm_client: llm_client, system_prompt: system_prompt,
+      @nodes[name] = LLMNode.new(name, llm_client: llm_client, system_prompt: system_prompt, &block)
     end
 
     def tool_node(name, tools:, &block)
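With &block back in the signature, llm_node can take a custom node body that replaces the default LLM behavior (see LLMNode in node.rb below). A sketch, assuming the Graph.new block DSL the example file uses and a state-plus-context block arity; neither is spelled out in this diff:

graph = LangGraphRB::Graph.new do
  llm_node :chat, llm_client: chat, system_prompt: 'Be terse.' do |state, context|
    # :llm_client and :system_prompt are auto-injected into context by LLMNode#call.
    reply = context[:llm_client].call(state[:messages] || [])
    { messages: (state[:messages] || []) + [{ role: 'assistant', content: reply.to_s }] }
  end
end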
data/lib/langgraph_rb/node.rb
CHANGED

@@ -35,12 +35,11 @@ module LangGraphRB
 
   # Specialized node for LLM calls
   class LLMNode < Node
-    attr_reader :llm_client, :system_prompt
+    attr_reader :llm_client, :system_prompt
 
-    def initialize(name, llm_client:, system_prompt: nil,
+    def initialize(name, llm_client:, system_prompt: nil, &block)
       @llm_client = llm_client
       @system_prompt = system_prompt
-      @add_message_callback = add_message_callback
 
       # Use default LLM behavior if no custom block provided
       super(name, &(block || method(:default_llm_call)))
@@ -50,8 +49,7 @@ module LangGraphRB
       # Auto-inject LLM config into the context for both default and custom blocks
       merged_context = (context || {}).merge(
         llm_client: @llm_client,
-        system_prompt: @system_prompt
-        add_message_callback: @add_message_callback
+        system_prompt: @system_prompt
       )
 
       begin
@@ -89,14 +87,12 @@ module LangGraphRB
           content: nil,
           tool_calls: response[:tool_calls]
         }
-        @add_message_callback&.call(assistant_msg)
         {
           messages: (state[:messages] || []) + [assistant_msg],
           tool_call: response[:tool_calls].first
         }
       else
         assistant_msg = { role: 'assistant', content: response.to_s }
-        @add_message_callback&.call(assistant_msg)
         {
           messages: (state[:messages] || []) + [assistant_msg],
           last_response: response.to_s
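add_message_callback is gone from both LLMNode and the llm_node DSL, with no replacement in this release. One hedged migration sketch for callers that logged each assistant response: decorate the LLM client. LoggingLLM is a hypothetical name, and the #call keyword signature is copied from ChatRubyOpenAI above:

require 'delegate'

# Hypothetical decorator standing in for the removed callback hook; it assumes
# the node invokes the client's #call, as the default flow in node.rb suggests.
class LoggingLLM < SimpleDelegator
  def call(messages, tools: nil)
    result = __getobj__.call(messages, tools: tools)
    puts "New message: #{result}" # mirrors the callback deleted from the example
    result
  end
end

chat = LoggingLLM.new(LangGraphRB::ChatOpenAI.new(model: 'gpt-4o-mini'))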
data/lib/langgraph_rb/version.rb
CHANGED

data/lib/langgraph_rb.rb
CHANGED

@@ -11,7 +11,6 @@ require_relative 'langgraph_rb/observers/logger'
 require_relative 'langgraph_rb/observers/structured'
 require_relative 'langgraph_rb/observers/langfuse'
 require_relative 'langgraph_rb/llm_base'
-require_relative 'langgraph_rb/chat_openai'
 require_relative 'langgraph_rb/tool_definition'
 
 module LangGraphRB
metadata
CHANGED

@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: langgraph_rb
 version: !ruby/object:Gem::Version
-  version: 0.1.7
+  version: 0.1.8
 platform: ruby
 authors:
 - Julian Toro
@@ -24,20 +24,6 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '2.0'
-- !ruby/object:Gem::Dependency
-  name: openai
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: 0.24.0
-  type: :runtime
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: 0.24.0
 - !ruby/object:Gem::Dependency
   name: bundler
   requirement: !ruby/object:Gem::Requirement
@@ -136,6 +122,7 @@ files:
 - langgraph_rb.gemspec
 - lib/langgraph_rb.rb
 - lib/langgraph_rb/chat_openai.rb
+- lib/langgraph_rb/chat_ruby_openai.rb
 - lib/langgraph_rb/command.rb
 - lib/langgraph_rb/edge.rb
 - lib/langgraph_rb/graph.rb
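Net effect of the dependency changes: the runtime dependency on openai ~> 0.24.0 is dropped from the gem metadata, and each wrapper rescues the missing require and re-raises with instructions to add the client gem yourself. A Gemfile sketch; pick one of the two clients, since both gems claim the OpenAI constant:

gem 'langgraph_rb', '~> 0.1.8'
gem 'openai', '~> 0.24'        # LangGraphRB::ChatOpenAI (client.chat.completions.create)
# gem 'ruby-openai', '~> 8.1'  # LangGraphRB::ChatRubyOpenAI (client.chat(parameters: ...))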