llama_bot_rails 0.1.7 → 0.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +22 -22
- data/app/channels/llama_bot_rails/chat_channel.rb +28 -0
- data/app/controllers/llama_bot_rails/agent_controller.rb +78 -5
- data/app/views/llama_bot_rails/agent/chat.html.erb +250 -127
- data/app/views/llama_bot_rails/agent/chat_ws.html.erb +1178 -0
- data/config/routes.rb +3 -0
- data/lib/generators/llama_bot_rails/install/install_generator.rb +10 -0
- data/lib/llama_bot_rails/agent_state_builder.rb +12 -7
- data/lib/llama_bot_rails/llama_bot.rb +47 -0
- data/lib/llama_bot_rails/version.rb +1 -1
- data/lib/llama_bot_rails.rb +5 -0
- metadata +2 -1
data/config/routes.rb
CHANGED
@@ -1,6 +1,9 @@
|
|
1
1
|
LlamaBotRails::Engine.routes.draw do
|
2
2
|
post "agent/command", to: "agent#command"
|
3
3
|
get "agent/chat", to: "agent#chat"
|
4
|
+
get "agent/chat_ws", to: "agent#chat_ws"
|
4
5
|
get "agent/threads", to: "agent#threads"
|
5
6
|
get "agent/chat-history/:thread_id", to: "agent#chat_history"
|
7
|
+
post "agent/send_message", to: "agent#send_message"
|
8
|
+
get "agent/test_streaming", to: "agent#test_streaming"
|
6
9
|
end
|
@@ -4,6 +4,16 @@ module LlamaBotRails
|
|
4
4
|
class InstallGenerator < Rails::Generators::Base
|
5
5
|
source_root File.expand_path("templates", __dir__)
|
6
6
|
|
7
|
+
def allow_docker_host
|
8
|
+
dev_config = "config/environments/development.rb"
|
9
|
+
insertion = " config.hosts << /host\\.docker\\.internal/ # Allow Docker agent to access Rails\n"
|
10
|
+
|
11
|
+
unless File.read(dev_config).include?("host.docker.internal")
|
12
|
+
inject_into_file dev_config, insertion, after: "Rails.application.configure do\n"
|
13
|
+
say_status("updated", "Added host.docker.internal to development.rb", :green)
|
14
|
+
end
|
15
|
+
end
|
16
|
+
|
7
17
|
def create_config_file
|
8
18
|
empty_directory "config/llama_bot"
|
9
19
|
copy_file "agent_prompt.txt", "config/llama_bot/agent_prompt.txt"
|
@@ -1,17 +1,22 @@
|
|
1
1
|
module LlamaBotRails
|
2
|
+
# This state builder maps to a LangGraph agent state. Most agents will have custom state. You can create a custom AgentStateBuilder when creating new, custom agents.
|
2
3
|
class AgentStateBuilder
|
3
4
|
def initialize(params:, context:)
|
4
5
|
@params = params
|
5
6
|
@context = context
|
6
7
|
end
|
7
|
-
|
8
|
-
|
8
|
+
|
9
|
+
|
10
|
+
# Warning: Types must match exactly or you'll get Pydantic errors. It's brittle - if these don't match exactly what's in the nodes.py LangGraph state Pydantic types (for example, having a null value/None type when it should be a string), it will break the agent.
|
11
|
+
# So if it doesn't map state types properly from the frontend, it will break. (must be exactly what's defined here).
|
12
|
+
# There won't be an exception thrown -- instead, you'll get a Pydantic error message showing up in the BaseMessage content field. (In my case, it was a broken ToolMessage, which serializes from the inherited BaseMessage.)
|
13
|
+
def build
|
9
14
|
{
|
10
|
-
|
11
|
-
thread_id: @context[:thread_id],
|
12
|
-
api_token: @context[:api_token],
|
13
|
-
agent_prompt: LlamaBotRails.agent_prompt_text,
|
14
|
-
agent_name: "llamabot" #
|
15
|
+
message: @params[:message], # Rails param from JS/chat UI. This is the user's message to the agent.
|
16
|
+
thread_id: @context[:thread_id], # This is the thread id for the agent. It is used to track the conversation history.
|
17
|
+
api_token: @context[:api_token], # This is an authenticated API token for the agent, so that it can authenticate with us. (It may need access to resources on our Rails app, such as the Rails Console.)
|
18
|
+
agent_prompt: LlamaBotRails.agent_prompt_text, # System prompt instructions for the agent. Can be customized in config/agent_prompt.txt
|
19
|
+
agent_name: "llamabot" #This routes to the appropriate LangGraph agent as defined in LlamaBot/langgraph.json, and enables us to access different agents on our LlamaBot server.
|
15
20
|
}
|
16
21
|
end
|
17
22
|
end
|
@@ -3,6 +3,7 @@ require 'json'
|
|
3
3
|
require 'uri'
|
4
4
|
|
5
5
|
module LlamaBotRails
|
6
|
+
#This class is responsible for initiating HTTP requests to the FastAPI backend that takes us to LangGraph.
|
6
7
|
class LlamaBot
|
7
8
|
def self.get_threads
|
8
9
|
uri = URI('http://localhost:8000/threads')
|
@@ -21,5 +22,51 @@ module LlamaBotRails
|
|
21
22
|
Rails.logger.error "Error fetching chat history: #{e.message}"
|
22
23
|
[]
|
23
24
|
end
|
25
|
+
|
26
|
+
def self.send_agent_message(agent_params)
|
27
|
+
return enum_for(__method__, agent_params) unless block_given?
|
28
|
+
|
29
|
+
uri = URI("http://localhost:8000/llamabot-chat-message")
|
30
|
+
http = Net::HTTP.new(uri.host, uri.port)
|
31
|
+
|
32
|
+
request = Net::HTTP::Post.new(uri)
|
33
|
+
request['Content-Type'] = 'application/json'
|
34
|
+
request.body = agent_params.to_json
|
35
|
+
|
36
|
+
# Stream the response instead of buffering it
|
37
|
+
http.request(request) do |response|
|
38
|
+
if response.code.to_i == 200
|
39
|
+
buffer = ''
|
40
|
+
|
41
|
+
response.read_body do |chunk|
|
42
|
+
buffer += chunk
|
43
|
+
|
44
|
+
# Process complete lines (ended with \n)
|
45
|
+
while buffer.include?("\n")
|
46
|
+
line, buffer = buffer.split("\n", 2)
|
47
|
+
if line.strip.present?
|
48
|
+
begin
|
49
|
+
yield JSON.parse(line)
|
50
|
+
rescue JSON::ParserError => e
|
51
|
+
Rails.logger.error "Parse error: #{e.message}"
|
52
|
+
end
|
53
|
+
end
|
54
|
+
end
|
55
|
+
end
|
56
|
+
|
57
|
+
# Process any remaining data in buffer
|
58
|
+
if buffer.strip.present?
|
59
|
+
begin
|
60
|
+
yield JSON.parse(buffer)
|
61
|
+
rescue JSON::ParserError => e
|
62
|
+
Rails.logger.error "Final buffer parse error: #{e.message}"
|
63
|
+
end
|
64
|
+
end
|
65
|
+
end
|
66
|
+
end
|
67
|
+
rescue => e
|
68
|
+
Rails.logger.error "Error sending agent message: #{e.message}"
|
69
|
+
{ error: e.message }
|
70
|
+
end
|
24
71
|
end
|
25
72
|
end
|
data/lib/llama_bot_rails.rb
CHANGED
@@ -1,5 +1,6 @@
|
|
1
1
|
require "llama_bot_rails/version"
|
2
2
|
require "llama_bot_rails/engine"
|
3
|
+
require "llama_bot_rails/llama_bot"
|
3
4
|
|
4
5
|
module LlamaBotRails
|
5
6
|
class << self
|
@@ -23,5 +24,9 @@ module LlamaBotRails
|
|
23
24
|
FileUtils.mkdir_p(agent_prompt_path.dirname)
|
24
25
|
File.write(agent_prompt_path, "\n#{new_instruction}", mode: 'a')
|
25
26
|
end
|
27
|
+
|
28
|
+
def send_agent_message(agent_params)
|
29
|
+
LlamaBot.send_agent_message(agent_params)
|
30
|
+
end
|
26
31
|
end
|
27
32
|
end
|
metadata
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: llama_bot_rails
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.1.7
|
4
|
+
version: 0.1.8
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Kody Kendall
|
@@ -117,6 +117,7 @@ files:
|
|
117
117
|
- app/models/llama_bot_rails/application_record.rb
|
118
118
|
- app/views/layouts/llama_bot_rails/application.html.erb
|
119
119
|
- app/views/llama_bot_rails/agent/chat.html.erb
|
120
|
+
- app/views/llama_bot_rails/agent/chat_ws.html.erb
|
120
121
|
- bin/rails
|
121
122
|
- bin/rubocop
|
122
123
|
- config/initializers/llama_bot_rails.rb
|