langgraph_rb 0.1.7 → 0.1.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/examples/chat_openai_tools_example.rb +23 -22
- data/langgraph_rb.gemspec +0 -1
- data/lib/langgraph_rb/chat_openai.rb +11 -3
- data/lib/langgraph_rb/chat_ruby_openai.rb +171 -0
- data/lib/langgraph_rb/graph.rb +2 -2
- data/lib/langgraph_rb/node.rb +4 -8
- data/lib/langgraph_rb/tool_definition.rb +1 -1
- data/lib/langgraph_rb/version.rb +1 -1
- data/lib/langgraph_rb.rb +0 -1
- metadata +2 -15
checksums.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
---
|
|
2
2
|
SHA256:
|
|
3
|
-
metadata.gz:
|
|
4
|
-
data.tar.gz:
|
|
3
|
+
metadata.gz: 299430b38fa98a9d82a902479bfd22256192300a9aa57635f8fc3240bbb662d6
|
|
4
|
+
data.tar.gz: 9dddfa3c24e5425fee43718aa06df70258cc3659ee624a5e5303f7eb59aa8f21
|
|
5
5
|
SHA512:
|
|
6
|
-
metadata.gz:
|
|
7
|
-
data.tar.gz:
|
|
6
|
+
metadata.gz: 65eb35f3aba912b4aa69513c02561d9a953349a20475ecec254ed078e708cb19c603b13e154cd0145e05a50c55bfdefe006283bfaf0f397c6e53c834fa033954
|
|
7
|
+
data.tar.gz: 03ac84c5bfed8b1bc67ee51517d243a743e09dfb671d3fce75cb3e79af299098253f65d8d73e1df10ec3041a4d995fbec4c3497245299b33b9bf7721878daae3
|
|
@@ -3,6 +3,9 @@ require 'pry'
|
|
|
3
3
|
require 'pry-byebug'
|
|
4
4
|
require 'langfuse'
|
|
5
5
|
require_relative '../lib/langgraph_rb'
|
|
6
|
+
require 'openai'
|
|
7
|
+
require_relative '../lib/langgraph_rb/chat_openai'
|
|
8
|
+
|
|
6
9
|
|
|
7
10
|
url = 'https://us.cloud.langfuse.com'
|
|
8
11
|
|
|
@@ -13,32 +16,34 @@ Langfuse.configure do |config|
|
|
|
13
16
|
config.debug = true # Enable debug logging
|
|
14
17
|
end
|
|
15
18
|
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
19
|
+
module Tool
|
|
20
|
+
class MovieInfoTool < LangGraphRB::ToolBase
|
|
21
|
+
define_function :search_movie, description: "MovieInfoTool: Search for a movie by title" do
|
|
22
|
+
property :query, type: "string", description: "The movie title to search for", required: true
|
|
23
|
+
end
|
|
20
24
|
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
25
|
+
define_function :get_movie_details, description: "MovieInfoTool: Get detailed information about a specific movie" do
|
|
26
|
+
property :movie_id, type: "integer", description: "The TMDb ID of the movie", required: true
|
|
27
|
+
end
|
|
24
28
|
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
29
|
+
def initialize(api_key: "demo")
|
|
30
|
+
@api_key = api_key
|
|
31
|
+
end
|
|
28
32
|
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
33
|
+
def search_movie(query:)
|
|
34
|
+
tool_response({ results: [ { id: 603, title: query, year: 1999 } ] })
|
|
35
|
+
end
|
|
32
36
|
|
|
33
|
-
|
|
34
|
-
|
|
37
|
+
def get_movie_details(movie_id:)
|
|
38
|
+
tool_response({ id: movie_id, title: "The Matrix", overview: "A computer hacker learns the truth of reality." })
|
|
39
|
+
end
|
|
35
40
|
end
|
|
36
41
|
end
|
|
37
42
|
|
|
38
43
|
def run_chat_openai_tools
|
|
39
|
-
tools = [MovieInfoTool.new(api_key: ENV['TMDB_API_KEY'] || 'demo')]
|
|
44
|
+
tools = [Tool::MovieInfoTool.new(api_key: ENV['TMDB_API_KEY'] || 'demo')]
|
|
40
45
|
|
|
41
|
-
chat = LangGraphRB::ChatOpenAI.new(model: ENV.fetch('OPENAI_MODEL', 'gpt-4o-mini'))
|
|
46
|
+
chat = LangGraphRB::ChatOpenAI.new(model: ENV.fetch('OPENAI_MODEL', 'gpt-4o-mini'), temperature: 0)
|
|
42
47
|
chat = chat.bind_tools(tools)
|
|
43
48
|
|
|
44
49
|
observers = [LangGraphRB::Observers::LangfuseObserver.new(name: 'chat-openai-tools-example')]
|
|
@@ -50,11 +55,7 @@ def run_chat_openai_tools
|
|
|
50
55
|
{ messages: existing + [user_msg] }
|
|
51
56
|
end
|
|
52
57
|
|
|
53
|
-
|
|
54
|
-
add_message_callback = ->(message) { puts "New message: #{message}" }
|
|
55
|
-
sys_prompt = "You are a movie assistant. Use tools when helpful."
|
|
56
|
-
|
|
57
|
-
llm_node :chat, llm_client: chat, system_prompt: sys_prompt, add_message_callback: add_message_callback
|
|
58
|
+
llm_node :chat, llm_client: chat, system_prompt: "You are a movie assistant. Use tools when helpful."
|
|
58
59
|
|
|
59
60
|
tool_node :tool, tools: tools
|
|
60
61
|
|
data/langgraph_rb.gemspec
CHANGED
|
@@ -1,12 +1,20 @@
|
|
|
1
|
-
|
|
1
|
+
begin
|
|
2
|
+
require 'openai'
|
|
3
|
+
rescue LoadError
|
|
4
|
+
raise "LangGraphRB::ChatOpenAI requires gem 'openai' (~> 0.24). Add it to your Gemfile."
|
|
5
|
+
end
|
|
2
6
|
require_relative 'llm_base'
|
|
3
7
|
|
|
4
8
|
module LangGraphRB
|
|
5
9
|
# ChatOpenAI wrapper compatible with LLMBase, supporting tool binding
|
|
6
10
|
class ChatOpenAI < LLMBase
|
|
7
|
-
def initialize(model:, temperature: 0.0, api_key: ENV['OPENAI_API_KEY'])
|
|
11
|
+
def initialize(model:, temperature: 0.0, api_key: ENV['OPENAI_API_KEY'], client: nil)
|
|
8
12
|
super(model: model, temperature: temperature)
|
|
9
|
-
@client = OpenAI::Client.new(api_key: api_key)
|
|
13
|
+
@client = client || OpenAI::Client.new(api_key: api_key)
|
|
14
|
+
|
|
15
|
+
unless @client.respond_to?(:chat) && @client.chat.respond_to?(:completions)
|
|
16
|
+
raise "LangGraphRB::ChatOpenAI expects 'openai' gem ~> 0.24 (client.chat.completions.create)"
|
|
17
|
+
end
|
|
10
18
|
end
|
|
11
19
|
|
|
12
20
|
# Returns a new instance with tools bound (non-destructive)
|
|
@@ -0,0 +1,171 @@
|
|
|
1
|
+
begin
|
|
2
|
+
require 'openai'
|
|
3
|
+
rescue LoadError
|
|
4
|
+
raise "LangGraphRB::ChatRubyOpenAI requires gem 'ruby-openai' (~> 8.1). Add it to your Gemfile."
|
|
5
|
+
end
|
|
6
|
+
|
|
7
|
+
require_relative 'llm_base'
|
|
8
|
+
|
|
9
|
+
module LangGraphRB
|
|
10
|
+
class ChatRubyOpenAI < LLMBase
|
|
11
|
+
def initialize(model:, temperature: 0.0, api_key: ENV['OPENAI_API_KEY'], client: nil)
|
|
12
|
+
super(model: model, temperature: temperature)
|
|
13
|
+
@client = client || OpenAI::Client.new(access_token: api_key)
|
|
14
|
+
end
|
|
15
|
+
|
|
16
|
+
def bind_tools(tools)
|
|
17
|
+
dup_instance = self.class.new(model: @model, temperature: @temperature)
|
|
18
|
+
dup_instance.instance_variable_set(:@client, @client)
|
|
19
|
+
dup_instance.instance_variable_set(:@bound_tools, Array(tools))
|
|
20
|
+
dup_instance
|
|
21
|
+
end
|
|
22
|
+
|
|
23
|
+
def call(messages, tools: nil)
|
|
24
|
+
raise ArgumentError, 'messages must be an Array' unless messages.is_a?(Array)
|
|
25
|
+
|
|
26
|
+
tool_definitions = (tools || @bound_tools)
|
|
27
|
+
tool_schemas = Array(tool_definitions).flat_map do |tool|
|
|
28
|
+
if tool.respond_to?(:to_openai_tool_schema)
|
|
29
|
+
Array(tool.to_openai_tool_schema)
|
|
30
|
+
else
|
|
31
|
+
[tool]
|
|
32
|
+
end
|
|
33
|
+
end
|
|
34
|
+
|
|
35
|
+
request_payload = {
|
|
36
|
+
model: @model,
|
|
37
|
+
temperature: @temperature,
|
|
38
|
+
messages: normalize_messages(messages)
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
if tool_schemas && !tool_schemas.empty?
|
|
42
|
+
request_payload[:tools] = tool_schemas
|
|
43
|
+
request_payload[:tool_choice] = 'auto'
|
|
44
|
+
end
|
|
45
|
+
|
|
46
|
+
notify_llm_request({
|
|
47
|
+
name: 'OpenAI::ChatCompletion',
|
|
48
|
+
model: @model,
|
|
49
|
+
model_parameters: { temperature: @temperature },
|
|
50
|
+
input: request_payload[:messages]
|
|
51
|
+
})
|
|
52
|
+
|
|
53
|
+
# ruby-openai 8.1.x: client.chat(parameters: {...}) returns a Hash
|
|
54
|
+
response = @client.chat(parameters: request_payload)
|
|
55
|
+
|
|
56
|
+
message = extract_message_from_response(response)
|
|
57
|
+
tool_calls = message[:tool_calls]
|
|
58
|
+
text_content = message[:content]
|
|
59
|
+
|
|
60
|
+
usage = extract_usage_from_response(response)
|
|
61
|
+
notify_llm_response({
|
|
62
|
+
output: tool_calls ? { tool_calls: tool_calls } : text_content,
|
|
63
|
+
prompt_tokens: usage[:prompt_tokens],
|
|
64
|
+
completion_tokens: usage[:completion_tokens],
|
|
65
|
+
total_tokens: usage[:total_tokens]
|
|
66
|
+
})
|
|
67
|
+
|
|
68
|
+
if tool_calls && !tool_calls.empty?
|
|
69
|
+
normalized_calls = tool_calls.map do |tc|
|
|
70
|
+
{
|
|
71
|
+
id: tc[:id],
|
|
72
|
+
name: tc[:function][:name],
|
|
73
|
+
arguments: parse_tool_arguments(tc[:function][:arguments])
|
|
74
|
+
}
|
|
75
|
+
end
|
|
76
|
+
{ tool_calls: normalized_calls }
|
|
77
|
+
else
|
|
78
|
+
text_content
|
|
79
|
+
end
|
|
80
|
+
rescue => e
|
|
81
|
+
notify_llm_error({ error: e.message })
|
|
82
|
+
raise e
|
|
83
|
+
end
|
|
84
|
+
|
|
85
|
+
private
|
|
86
|
+
|
|
87
|
+
def normalize_messages(messages)
|
|
88
|
+
messages.map do |m|
|
|
89
|
+
role = (m[:role] || m['role'])
|
|
90
|
+
content = m[:content] || m['content']
|
|
91
|
+
|
|
92
|
+
normalized = { role: role }
|
|
93
|
+
|
|
94
|
+
if content.is_a?(Array)
|
|
95
|
+
normalized[:content] = content
|
|
96
|
+
elsif content.nil?
|
|
97
|
+
normalized[:content] = nil
|
|
98
|
+
else
|
|
99
|
+
normalized[:content] = content.to_s
|
|
100
|
+
end
|
|
101
|
+
|
|
102
|
+
tool_calls = m[:tool_calls] || m['tool_calls']
|
|
103
|
+
if tool_calls && role.to_s == 'assistant'
|
|
104
|
+
normalized[:tool_calls] = Array(tool_calls).map do |tc|
|
|
105
|
+
if tc[:function] || tc['function']
|
|
106
|
+
fn = tc[:function] || tc['function']
|
|
107
|
+
raw_args = fn[:arguments] || fn['arguments']
|
|
108
|
+
args_str = raw_args.is_a?(String) ? raw_args : JSON.dump(raw_args || {})
|
|
109
|
+
{
|
|
110
|
+
id: (tc[:id] || tc['id']),
|
|
111
|
+
type: 'function',
|
|
112
|
+
function: {
|
|
113
|
+
name: (fn[:name] || fn['name']).to_s,
|
|
114
|
+
arguments: args_str
|
|
115
|
+
}
|
|
116
|
+
}
|
|
117
|
+
else
|
|
118
|
+
raw_args = tc[:arguments] || tc['arguments']
|
|
119
|
+
args_str = raw_args.is_a?(String) ? raw_args : JSON.dump(raw_args || {})
|
|
120
|
+
{
|
|
121
|
+
id: (tc[:id] || tc['id']),
|
|
122
|
+
type: 'function',
|
|
123
|
+
function: {
|
|
124
|
+
name: (tc[:name] || tc['name']).to_s,
|
|
125
|
+
arguments: args_str
|
|
126
|
+
}
|
|
127
|
+
}
|
|
128
|
+
end
|
|
129
|
+
end
|
|
130
|
+
end
|
|
131
|
+
|
|
132
|
+
if role.to_s == 'tool'
|
|
133
|
+
tool_call_id = m[:tool_call_id] || m['tool_call_id']
|
|
134
|
+
name = m[:name] || m['name']
|
|
135
|
+
normalized[:tool_call_id] = tool_call_id if tool_call_id
|
|
136
|
+
normalized[:name] = name if name
|
|
137
|
+
end
|
|
138
|
+
|
|
139
|
+
normalized
|
|
140
|
+
end
|
|
141
|
+
end
|
|
142
|
+
|
|
143
|
+
def parse_tool_arguments(raw)
|
|
144
|
+
return {} if raw.nil?
|
|
145
|
+
case raw
|
|
146
|
+
when String
|
|
147
|
+
JSON.parse(raw) rescue {}
|
|
148
|
+
when Hash
|
|
149
|
+
raw
|
|
150
|
+
else
|
|
151
|
+
{}
|
|
152
|
+
end
|
|
153
|
+
end
|
|
154
|
+
|
|
155
|
+
def extract_message_from_response(response)
|
|
156
|
+
(response['choices'] || []).dig(0, 'message') || {}
|
|
157
|
+
end
|
|
158
|
+
|
|
159
|
+
def extract_usage_from_response(response)
|
|
160
|
+
usage = response['usage']
|
|
161
|
+
return { prompt_tokens: nil, completion_tokens: nil, total_tokens: nil } unless usage
|
|
162
|
+
{
|
|
163
|
+
prompt_tokens: usage['prompt_tokens'],
|
|
164
|
+
completion_tokens: usage['completion_tokens'],
|
|
165
|
+
total_tokens: usage['total_tokens']
|
|
166
|
+
}
|
|
167
|
+
end
|
|
168
|
+
end
|
|
169
|
+
end
|
|
170
|
+
|
|
171
|
+
|
data/lib/langgraph_rb/graph.rb
CHANGED
|
@@ -35,11 +35,11 @@ module LangGraphRB
|
|
|
35
35
|
end
|
|
36
36
|
end
|
|
37
37
|
|
|
38
|
-
def llm_node(name, llm_client:, system_prompt: nil, add_message_callback: nil, &block)
|
|
38
|
+
def llm_node(name, llm_client:, system_prompt: nil, &block)
|
|
39
39
|
name = name.to_sym
|
|
40
40
|
raise GraphError, "Node '#{name}' already exists" if @nodes.key?(name)
|
|
41
41
|
|
|
42
|
-
@nodes[name] = LLMNode.new(name, llm_client: llm_client, system_prompt: system_prompt, add_message_callback: add_message_callback, &block)
|
|
42
|
+
@nodes[name] = LLMNode.new(name, llm_client: llm_client, system_prompt: system_prompt, &block)
|
|
43
43
|
end
|
|
44
44
|
|
|
45
45
|
def tool_node(name, tools:, &block)
|
data/lib/langgraph_rb/node.rb
CHANGED
|
@@ -35,12 +35,11 @@ module LangGraphRB
|
|
|
35
35
|
|
|
36
36
|
# Specialized node for LLM calls
|
|
37
37
|
class LLMNode < Node
|
|
38
|
-
attr_reader :llm_client, :system_prompt
|
|
38
|
+
attr_reader :llm_client, :system_prompt
|
|
39
39
|
|
|
40
|
-
def initialize(name, llm_client:, system_prompt: nil, add_message_callback: nil, &block)
|
|
40
|
+
def initialize(name, llm_client:, system_prompt: nil, &block)
|
|
41
41
|
@llm_client = llm_client
|
|
42
42
|
@system_prompt = system_prompt
|
|
43
|
-
@add_message_callback = add_message_callback
|
|
44
43
|
|
|
45
44
|
# Use default LLM behavior if no custom block provided
|
|
46
45
|
super(name, &(block || method(:default_llm_call)))
|
|
@@ -50,8 +49,7 @@ module LangGraphRB
|
|
|
50
49
|
# Auto-inject LLM config into the context for both default and custom blocks
|
|
51
50
|
merged_context = (context || {}).merge(
|
|
52
51
|
llm_client: @llm_client,
|
|
53
|
-
system_prompt: @system_prompt,
|
|
54
|
-
add_message_callback: @add_message_callback
|
|
52
|
+
system_prompt: @system_prompt
|
|
55
53
|
)
|
|
56
54
|
|
|
57
55
|
begin
|
|
@@ -89,14 +87,12 @@ module LangGraphRB
|
|
|
89
87
|
content: nil,
|
|
90
88
|
tool_calls: response[:tool_calls]
|
|
91
89
|
}
|
|
92
|
-
@add_message_callback&.call(assistant_msg)
|
|
93
90
|
{
|
|
94
91
|
messages: (state[:messages] || []) + [assistant_msg],
|
|
95
92
|
tool_call: response[:tool_calls].first
|
|
96
93
|
}
|
|
97
94
|
else
|
|
98
95
|
assistant_msg = { role: 'assistant', content: response.to_s }
|
|
99
|
-
@add_message_callback&.call(assistant_msg)
|
|
100
96
|
{
|
|
101
97
|
messages: (state[:messages] || []) + [assistant_msg],
|
|
102
98
|
last_response: response.to_s
|
|
@@ -162,7 +158,7 @@ module LangGraphRB
|
|
|
162
158
|
return {
|
|
163
159
|
id: call[:id],
|
|
164
160
|
name: name.to_sym,
|
|
165
|
-
class_name: class_name,
|
|
161
|
+
class_name: class_name.gsub('--', '::'),
|
|
166
162
|
arguments: call[:arguments]
|
|
167
163
|
}
|
|
168
164
|
elsif call[:function]
|
data/lib/langgraph_rb/version.rb
CHANGED
data/lib/langgraph_rb.rb
CHANGED
|
@@ -11,7 +11,6 @@ require_relative 'langgraph_rb/observers/logger'
|
|
|
11
11
|
require_relative 'langgraph_rb/observers/structured'
|
|
12
12
|
require_relative 'langgraph_rb/observers/langfuse'
|
|
13
13
|
require_relative 'langgraph_rb/llm_base'
|
|
14
|
-
require_relative 'langgraph_rb/chat_openai'
|
|
15
14
|
require_relative 'langgraph_rb/tool_definition'
|
|
16
15
|
|
|
17
16
|
module LangGraphRB
|
metadata
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
|
2
2
|
name: langgraph_rb
|
|
3
3
|
version: !ruby/object:Gem::Version
|
|
4
|
-
version: 0.1.7
|
|
4
|
+
version: 0.1.9
|
|
5
5
|
platform: ruby
|
|
6
6
|
authors:
|
|
7
7
|
- Julian Toro
|
|
@@ -24,20 +24,6 @@ dependencies:
|
|
|
24
24
|
- - "~>"
|
|
25
25
|
- !ruby/object:Gem::Version
|
|
26
26
|
version: '2.0'
|
|
27
|
-
- !ruby/object:Gem::Dependency
|
|
28
|
-
name: openai
|
|
29
|
-
requirement: !ruby/object:Gem::Requirement
|
|
30
|
-
requirements:
|
|
31
|
-
- - "~>"
|
|
32
|
-
- !ruby/object:Gem::Version
|
|
33
|
-
version: 0.24.0
|
|
34
|
-
type: :runtime
|
|
35
|
-
prerelease: false
|
|
36
|
-
version_requirements: !ruby/object:Gem::Requirement
|
|
37
|
-
requirements:
|
|
38
|
-
- - "~>"
|
|
39
|
-
- !ruby/object:Gem::Version
|
|
40
|
-
version: 0.24.0
|
|
41
27
|
- !ruby/object:Gem::Dependency
|
|
42
28
|
name: bundler
|
|
43
29
|
requirement: !ruby/object:Gem::Requirement
|
|
@@ -136,6 +122,7 @@ files:
|
|
|
136
122
|
- langgraph_rb.gemspec
|
|
137
123
|
- lib/langgraph_rb.rb
|
|
138
124
|
- lib/langgraph_rb/chat_openai.rb
|
|
125
|
+
- lib/langgraph_rb/chat_ruby_openai.rb
|
|
139
126
|
- lib/langgraph_rb/command.rb
|
|
140
127
|
- lib/langgraph_rb/edge.rb
|
|
141
128
|
- lib/langgraph_rb/graph.rb
|