llama_bot_rails 0.1.7 → 0.1.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/config/routes.rb CHANGED
@@ -1,6 +1,9 @@
 LlamaBotRails::Engine.routes.draw do
   post "agent/command", to: "agent#command"
   get "agent/chat", to: "agent#chat"
+  get "agent/chat_ws", to: "agent#chat_ws"
   get "agent/threads", to: "agent#threads"
   get "agent/chat-history/:thread_id", to: "agent#chat_history"
+  post "agent/send_message", to: "agent#send_message"
+  get "agent/test_streaming", to: "agent#test_streaming"
 end
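
The new routes back the WebSocket chat page and the streaming message flow. A minimal sketch of exercising agent/send_message from Ruby, assuming the engine is mounted at /llama_bot (as the installer's finish message suggests) and that the action reads message and thread_id params the way the state builder does; authentication and CSRF concerns are ignored here:

    require "net/http"
    require "json"
    require "uri"

    # Hypothetical client call; the mount path and param names are assumptions.
    uri = URI("http://localhost:3000/llama_bot/agent/send_message")
    request = Net::HTTP::Post.new(uri, "Content-Type" => "application/json")
    request.body = { message: "Hello, agent!", thread_id: "demo-thread" }.to_json

    Net::HTTP.start(uri.host, uri.port) do |http|
      response = http.request(request)
      puts response.body
    end
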
@@ -4,9 +4,25 @@ module LlamaBotRails
     class InstallGenerator < Rails::Generators::Base
       source_root File.expand_path("templates", __dir__)
 
-      def create_config_file
-        empty_directory "config/llama_bot"
-        copy_file "agent_prompt.txt", "config/llama_bot/agent_prompt.txt"
+      def allow_docker_host
+        dev_config = "config/environments/development.rb"
+        insertion = " config.hosts << /host\\.docker\\.internal/ # Allow Docker agent to access Rails\n"
+
+        unless File.read(dev_config).include?("host.docker.internal")
+          inject_into_file dev_config, insertion, after: "Rails.application.configure do\n"
+          say_status("updated", "Added host.docker.internal to development.rb", :green)
+        end
+      end
+
+      def create_agent_prompt
+        empty_directory "app/llama_bot/prompts"
+        copy_file "agent_prompt.txt", "app/llama_bot/prompts/agent_prompt.txt"
+      end
+
+      def create_agent_state_builder
+        empty_directory "app/llama_bot"
+        template "agent_state_builder.rb.erb", "app/llama_bot/agent_state_builder.rb"
+        say_status("created", "app/llama_bot/agent_state_builder.rb", :green)
       end
 
       def mount_engine
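
For reference, after allow_docker_host runs, the development environment file should contain a line like the one below (a sketch; the exact indentation of the injected line may differ):

    # config/environments/development.rb
    Rails.application.configure do
      config.hosts << /host\.docker\.internal/ # Allow Docker agent to access Rails

      # ... existing development configuration ...
    end
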
@@ -36,9 +52,17 @@ module LlamaBotRails
       def create_initializer
         create_file "config/initializers/llama_bot_rails.rb", <<~RUBY
           Rails.application.configure do
-            config.llama_bot_rails.websocket_url = ENV.fetch("LLAMABOT_WEBSOCKET_URL", "ws://localhost:8000/ws")
-            config.llama_bot_rails.llamabot_api_url = ENV.fetch("LLAMABOT_API_URL", "http://localhost:8000")
+            config.llama_bot_rails.websocket_url = ENV.fetch("LLAMABOT_WEBSOCKET_URL", "ws://localhost:8000/ws")
+            config.llama_bot_rails.llamabot_api_url = ENV.fetch("LLAMABOT_API_URL", "http://localhost:8000")
             config.llama_bot_rails.enable_console_tool = !Rails.env.production?
+
+            # ------------------------------------------------------------------------
+            # Custom State Builder
+            # ------------------------------------------------------------------------
+            # The gem uses `LlamaBotRails::AgentStateBuilder` by default.
+            # Uncomment this line to use the builder in app/llama_bot/
+            #
+            # config.llama_bot_rails.state_builder_class = "#{app_name}::AgentStateBuilder"
           end
         RUBY
       end
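
With that last comment line uncommented, the generated initializer ends up looking roughly like this (MyApp stands in for the host application's module name, which app_name resolves at generation time):

    # config/initializers/llama_bot_rails.rb
    Rails.application.configure do
      config.llama_bot_rails.websocket_url = ENV.fetch("LLAMABOT_WEBSOCKET_URL", "ws://localhost:8000/ws")
      config.llama_bot_rails.llamabot_api_url = ENV.fetch("LLAMABOT_API_URL", "http://localhost:8000")
      config.llama_bot_rails.enable_console_tool = !Rails.env.production?

      # Use the builder generated in app/llama_bot/ instead of the gem default.
      config.llama_bot_rails.state_builder_class = "MyApp::AgentStateBuilder"
    end
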
@@ -46,6 +70,12 @@ module LlamaBotRails
       def finish
         say "\n✅ LlamaBotRails installed! Visit http://localhost:3000/llama_bot/agent/chat\n", :green
       end
+
+      private
+
+      def app_name
+        Rails.application.class.module_parent_name
+      end
     end
   end
 end
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+#
+# Customize the params sent to your LangGraph agent here.
+# Uncomment the line in the initializer to activate this builder.
+module <%= app_name %>
+  class AgentStateBuilder
+    def initialize(params:, context:)
+      @params = params
+      @context = context
+    end
+
+    def build
+      {
+        message: @params[:message], # Rails param from JS/chat UI. This is the user's message to the agent.
+        thread_id: @context[:thread_id], # This is the thread id for the agent. It is used to track the conversation history.
+        api_token: @context[:api_token], # This is an authenticated API token for the agent, so that it can authenticate with us. (It may need access to resources on our Rails app, such as the Rails Console.)
+        agent_prompt: LlamaBotRails.agent_prompt_text, # System prompt instructions for the agent. Can be customized in app/llama_bot/prompts/agent_prompt.txt
+        agent_name: "llamabot" # This routes to the appropriate LangGraph agent as defined in LlamaBot/langgraph.json, and enables us to access different agents on our LlamaBot server.
+      }
+    end
+  end
+end
@@ -1,17 +1,22 @@
 module LlamaBotRails
+  # This state builder maps to a LangGraph agent state. Most agents will have custom state; you can create a custom AgentStateBuilder when creating new, custom agents.
   class AgentStateBuilder
     def initialize(params:, context:)
       @params = params
       @context = context
     end
-
-    def build
+
+
+    # Warning: types must match the LangGraph state's Pydantic types in nodes.py exactly, or the agent will break -- it's brittle (for example, sending a nil/None value where a string is expected).
+    # If the frontend doesn't map state types to exactly what's defined here, the agent will break.
+    # No exception is thrown -- instead, a Pydantic error message shows up in the BaseMessage content field (in my case it was a broken ToolMessage, serialized from the inherited BaseMessage).
+    def build
       {
-        user_message: @params[:message], # Rails param from JS/chat UI
-        thread_id: @context[:thread_id],
-        api_token: @context[:api_token],
-        agent_prompt: LlamaBotRails.agent_prompt_text,
-        agent_name: "llamabot" # Very important. This routes to the appropriate LangGraph agent as defined in langgraph.json
+        message: @params[:message], # Rails param from JS/chat UI. This is the user's message to the agent.
+        thread_id: @params[:thread_id], # This is the thread id for the agent. It is used to track the conversation history.
+        api_token: @context[:api_token], # This is an authenticated API token for the agent, so that it can authenticate with us. (It may need access to resources on our Rails app, such as the Rails Console.)
+        agent_prompt: LlamaBotRails.agent_prompt_text, # System prompt instructions for the agent. Can be customized in app/llama_bot/prompts/agent_prompt.txt
+        agent_name: "llamabot" # This routes to the appropriate LangGraph agent as defined in LlamaBot/langgraph.json, and enables us to access different agents on our LlamaBot server.
       }
     end
   end
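
Given the type-matching warning above, custom keys should only be added when the LangGraph agent's state schema declares matching fields. A hypothetical sketch of such a customization (MyApp and the extra key are illustrative, not part of the gem):

    module MyApp
      class AgentStateBuilder
        def initialize(params:, context:)
          @params = params
          @context = context
        end

        def build
          {
            message: @params[:message],
            thread_id: @params[:thread_id],
            api_token: @context[:api_token],
            agent_prompt: LlamaBotRails.agent_prompt_text,
            agent_name: "llamabot",
            user_email: @params[:user_email] # only works if the agent's Pydantic state defines a matching string field
          }
        end
      end
    end
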
@@ -20,7 +20,7 @@ module LlamaBotRails
     end
 
     initializer "llama_bot_rails.defaults" do |app|
-      app.config.llama_bot_rails.state_builder_class = "LlamaBotRails::AgentStateBuilder"
+      app.config.llama_bot_rails.state_builder_class ||= "LlamaBotRails::AgentStateBuilder"
     end
   end
 end
@@ -3,6 +3,7 @@ require 'json'
 require 'uri'
 
 module LlamaBotRails
+  # This class is responsible for initiating HTTP requests to the FastAPI backend that takes us to LangGraph.
   class LlamaBot
     def self.get_threads
       uri = URI('http://localhost:8000/threads')
@@ -21,5 +22,51 @@ module LlamaBotRails
       Rails.logger.error "Error fetching chat history: #{e.message}"
       []
     end
+
+    def self.send_agent_message(agent_params)
+      return enum_for(__method__, agent_params) unless block_given?
+
+      uri = URI("http://localhost:8000/llamabot-chat-message")
+      http = Net::HTTP.new(uri.host, uri.port)
+
+      request = Net::HTTP::Post.new(uri)
+      request['Content-Type'] = 'application/json'
+      request.body = agent_params.to_json
+
+      # Stream the response instead of buffering it
+      http.request(request) do |response|
+        if response.code.to_i == 200
+          buffer = ''
+
+          response.read_body do |chunk|
+            buffer += chunk
+
+            # Process complete lines (ended with \n)
+            while buffer.include?("\n")
+              line, buffer = buffer.split("\n", 2)
+              if line.strip.present?
+                begin
+                  yield JSON.parse(line)
+                rescue JSON::ParserError => e
+                  Rails.logger.error "Parse error: #{e.message}"
+                end
+              end
+            end
+          end
+
+          # Process any remaining data in buffer
+          if buffer.strip.present?
+            begin
+              yield JSON.parse(buffer)
+            rescue JSON::ParserError => e
+              Rails.logger.error "Final buffer parse error: #{e.message}"
+            end
+          end
+        end
+      end
+    rescue => e
+      Rails.logger.error "Error sending agent message: #{e.message}"
+      { error: e.message }
+    end
   end
 end
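
A usage sketch for the new streaming helper, assuming the FastAPI backend is running on localhost:8000 and returns newline-delimited JSON (which is what the parser above expects); the payload keys and the "content" field are assumptions, since the real payload comes from the AgentStateBuilder:

    state = {
      message: "Hello, agent!",
      thread_id: "demo-thread",
      agent_name: "llamabot"
    }

    # Block form: each parsed JSON chunk is yielded as it arrives.
    LlamaBotRails::LlamaBot.send_agent_message(state) do |chunk|
      puts chunk["content"] # "content" is an assumed field name in the streamed JSON
    end
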
@@ -0,0 +1,19 @@
+module LlamaBotRails
+  class Railtie < ::Rails::Railtie
+    config.before_configuration do |app|
+      llama_bot_path = Rails.root.join("app", "llama_bot")
+
+      # Add to autoload paths if it exists and isn't already included
+      if llama_bot_path.exist? && !app.config.autoload_paths.include?(llama_bot_path.to_s)
+        app.config.autoload_paths << llama_bot_path.to_s
+        Rails.logger&.info "[LlamaBot] Added #{llama_bot_path} to autoload_paths"
+      end
+
+      # Add to eager load paths if it exists and isn't already included
+      if llama_bot_path.exist? && !app.config.eager_load_paths.include?(llama_bot_path.to_s)
+        app.config.eager_load_paths << llama_bot_path.to_s
+        Rails.logger&.info "[LlamaBot] Added #{llama_bot_path} to eager_load_paths"
+      end
+    end
+  end
+end
@@ -1,3 +1,3 @@
 module LlamaBotRails
-  VERSION = "0.1.7"
+  VERSION = "0.1.9"
 end
@@ -1,5 +1,7 @@
 require "llama_bot_rails/version"
 require "llama_bot_rails/engine"
+require "llama_bot_rails/llama_bot"
+require "llama_bot_rails/railtie"
 
 module LlamaBotRails
   class << self
@@ -8,7 +10,7 @@ module LlamaBotRails
     end
 
     def agent_prompt_path
-      Rails.root.join("config", "llama_bot", "agent_prompt.txt")
+      Rails.root.join("app", "llama_bot", "prompts", "agent_prompt.txt")
     end
 
     def agent_prompt_text
@@ -23,5 +25,9 @@ module LlamaBotRails
       FileUtils.mkdir_p(agent_prompt_path.dirname)
       File.write(agent_prompt_path, "\n#{new_instruction}", mode: 'a')
     end
+
+    def send_agent_message(agent_params)
+      LlamaBot.send_agent_message(agent_params)
+    end
   end
 end
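
Because the module-level helper does not forward a block to LlamaBot.send_agent_message, the enumerator form is the natural way to consume it from the top-level API (a sketch, reusing an illustrative state hash):

    state = { message: "Hello, agent!", thread_id: "demo-thread", agent_name: "llamabot" }

    chunks = LlamaBotRails.send_agent_message(state) # no block given, so an Enumerator is returned
    chunks.each { |chunk| puts chunk }               # streams each parsed JSON object as it arrives
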
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: llama_bot_rails
 version: !ruby/object:Gem::Version
-  version: 0.1.7
+  version: 0.1.9
 platform: ruby
 authors:
 - Kody Kendall
@@ -117,16 +117,19 @@ files:
 - app/models/llama_bot_rails/application_record.rb
 - app/views/layouts/llama_bot_rails/application.html.erb
 - app/views/llama_bot_rails/agent/chat.html.erb
+- app/views/llama_bot_rails/agent/chat_ws.html.erb
 - bin/rails
 - bin/rubocop
 - config/initializers/llama_bot_rails.rb
 - config/routes.rb
 - lib/generators/llama_bot_rails/install/install_generator.rb
 - lib/generators/llama_bot_rails/install/templates/agent_prompt.txt
+- lib/generators/llama_bot_rails/install/templates/agent_state_builder.rb.erb
 - lib/llama_bot_rails.rb
 - lib/llama_bot_rails/agent_state_builder.rb
 - lib/llama_bot_rails/engine.rb
 - lib/llama_bot_rails/llama_bot.rb
+- lib/llama_bot_rails/railtie.rb
 - lib/llama_bot_rails/tools/rails_console_tool.rb
 - lib/llama_bot_rails/version.rb
 - lib/tasks/llama_bot_rails_tasks.rake