llama_bot_rails 0.1.13 → 0.1.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 73211770ce4e13b9fb835721ed12f0d422889a84d0c45473174b7bf97ff76e93
- data.tar.gz: b70b8fdcc9375f45d317427cd62546c3d2190fab82d9769f50afeaabc3af4870
+ metadata.gz: 4a2a7cba5de6b95658edf2f1445ddd883bf0fa7b6f0a844fc7b65d75696bbe93
+ data.tar.gz: 471a4c54a3f26dd4d6b68c6d208d3c8e553cfcf2933436bad3e7b30af2d50296
  SHA512:
- metadata.gz: c7b9a928144a765964510b4e1d2ce51a0736b3105a083503f98cb7681e229a6e7fd2c7d9246c13ee9233f04adbd2448d03bc4dfd626b550bbb8c38bf848cbbdb
- data.tar.gz: e94b2ee65ac23a81ba952bfd14fd2679c442a80058f42743cd06c0c132dd6164dcebdca885c1979eeeeac89732ae4fa2726d51266669df2834a4a85ae1a8d857
+ metadata.gz: 49afcfcec2697177cccc266cbdca5b7b0859feef23fb45d2626435c686368e0231d9cf0c66b894a3a0696dc50d54c9e285fc6b66905b42be752ac24126c82d97
+ data.tar.gz: ada3ecb0a74f2fef56122ffcc97889b7c877d670296a5f8a556d3afad685b53332f7be4306cfdf901f2b00744f8fd600c56041605aef02db666d9e51fe633a72
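
The checksum rows above cover the metadata.gz and data.tar.gz archives packed inside the .gem file. A minimal Ruby sketch for reproducing the SHA256 values from a locally fetched copy (the file name is an assumption; adjust it to wherever gem fetch llama_bot_rails -v 0.1.14 saved the gem):

require "rubygems/package"
require "digest"

gem_path = "llama_bot_rails-0.1.14.gem" # assumed local path

File.open(gem_path, "rb") do |io|
  # A .gem is a tar archive; checksums.yaml records digests of its inner entries.
  Gem::Package::TarReader.new(io).each do |entry|
    next unless %w[metadata.gz data.tar.gz].include?(entry.full_name)
    puts "#{entry.full_name}: #{Digest::SHA256.hexdigest(entry.read)}"
  end
end
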
@@ -88,6 +88,16 @@ module LlamaBotRails
  end
  end

+ # Close the external WebSocket connection BEFORE stopping async tasks
+ if @external_ws_connection
+ begin
+ @external_ws_connection.close
+ Rails.logger.info "👋 [LlamaBot] Gracefully closed external WebSocket connection for: #{connection_id}"
+ rescue => e
+ Rails.logger.warn "❌ [LlamaBot] Could not close WebSocket connection: #{e.message}"
+ end
+ end
+
  # Clean up async tasks with better error handling
  begin
  @listener_task&.stop rescue nil
@@ -97,16 +107,6 @@ module LlamaBotRails
  Rails.logger.error "[LlamaBot] Error stopping async tasks: #{e.message}"
  end

- # Clean up the connection
- if @external_ws_connection
- begin
- @external_ws_connection.close
- Rails.logger.info "[LlamaBot] Closed external WebSocket connection for: #{connection_id}"
- rescue => e
- Rails.logger.warn "[LlamaBot] Could not close WebSocket connection: #{e.message}"
- end
- end
-
  # Force garbage collection in development/test environments to help clean up
  if !Rails.env.production?
  GC.start
@@ -138,7 +138,7 @@ module LlamaBotRails

  builder = state_builder_class.new(
  params: data,
- context: { api_token: @api_token }
+ context: { api_token: @api_token }.with_indifferent_access
  )

  # 2. Construct the LangGraph-ready state
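
The context hash passed to the state builder is now wrapped in with_indifferent_access, so builders can read it with either symbol or string keys; the bundled builders further down switch @params to string keys as well. A minimal sketch of the behaviour, assuming only ActiveSupport is loaded:

require "active_support/core_ext/hash/indifferent_access"

plain   = { api_token: "abc123" }
wrapped = plain.with_indifferent_access

plain["api_token"]   # => nil, the string key misses the symbol entry
wrapped["api_token"] # => "abc123"
wrapped[:api_token]  # => "abc123"
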
@@ -253,84 +253,82 @@ module LlamaBotRails
  # Wait for tasks to complete or connection to close
  [@listener_task, @keepalive_task].each(&:wait)
  rescue => e
- Rails.logger.error "[LlamaBot] Failed to connect to external WebSocket for connection #{connection_id}: #{e.message}"
+ Rails.logger.error "[LlamaBot] Failed to connect to external WebSocket for connection #{connection_id}: #{e.message}"
  ensure
  # Clean up tasks if they exist
  @listener_task&.stop
  @keepalive_task&.stop
- @external_ws_connection&.close
+ if @external_ws_connection
+ @external_ws_connection.close
+ Rails.logger.info "👋 [LlamaBot] Cleaned up external WebSocket connection in ensure block"
+ end
  end
  end
  end

  # Listen for messages from the LlamaBot Backend
  def listen_to_external_websocket(connection)
- while message = connection.read
+ begin
+ while message = connection.read
+ # Extract the actual message content
+ message_content = message.buffer if message.buffer
+ next unless message_content.present?

- #Try to fix the ping/pong issue keepliave
- # if message.type == :ping
-
- # # respond with :pong
- # connection.write(Async::WebSocket::Messages::ControlFrame.new(:pong, frame.data))
- # connection.flush
- # next
- # end
- # Extract the actual message content
- if message.buffer
- message_content = message.buffer # Use .data to get the message content
- else
- message_content = message.content
- end
+ Rails.logger.info "[LlamaBot] Received from external WebSocket: #{message_content}"

- Rails.logger.info "[LlamaBot] Received from external WebSocket: #{message_content}"
-
- begin
- parsed_message = JSON.parse(message_content)
-
- if parsed_message["type"] != "pong"
- # byebug
- end
-
- case parsed_message["type"]
- when "ai"
- # Add any additional handling for write_code messages here
- formatted_message = { message: {type: "ai", content: parsed_message['content'], base_message: parsed_message["base_message"]} }.to_json
- when "tool"
- # Add any additional handling for tool messages here
- formatted_message = { message: {type: "tool", content: parsed_message['content'], base_message: parsed_message["base_message"]} }.to_json
- when "error"
- Rails.logger.error "[LlamaBot] ---------Received error message!----------"
- response = parsed_message['content']
- formatted_message = { message: message_content }.to_json
- Rails.logger.error "[LlamaBot] ---------------------> Response: #{response}"
- Rails.logger.error "[LlamaBot] ---------Completed error message!----------"
- when "pong"
- # Tell llamabot frontend that we've received a pong response, and we're still connected
- formatted_message = { message: {type: "pong"} }.to_json
+ begin
+ parsed_message = JSON.parse(message_content)
+
+ formatted_message = { message: {type: parsed_message["type"], content: parsed_message['content'], base_message: parsed_message["base_message"]} }.to_json
+ case parsed_message["type"]
+ when "error"
+ Rails.logger.error "[LlamaBot] ---------Received error message!----------"
+ response = parsed_message['content']
+ formatted_message = { message: message_content }.to_json
+ Rails.logger.error "[LlamaBot] ---------------------> Response: #{response}"
+ Rails.logger.error "[LlamaBot] ---------Completed error message!----------"
+ when "pong"
+ # Tell llamabot frontend that we've received a pong response, and we're still connected
+ formatted_message = { message: {type: "pong"} }.to_json
+ end
+ ActionCable.server.broadcast "chat_channel_#{params[:session_id]}", formatted_message
+ rescue JSON::ParserError => e
+ Rails.logger.error "[LlamaBot] Failed to parse message as JSON: #{e.message}"
+ # Continue to the next message without crashing the listener.
+ next
  end
- rescue JSON::ParserError => e
- Rails.logger.error "[LlamaBot] Failed to parse message as JSON: #{e.message}"
  end
- ActionCable.server.broadcast "chat_channel_#{params[:session_id]}", formatted_message
+ rescue IOError, Errno::ECONNRESET => e
+ # This is a recoverable error. Log it and allow the task to end gracefully.
+ # The `ensure` block in `setup_external_websocket` will handle the cleanup.
+ Rails.logger.warn "❌ [LlamaBot] Connection lost while listening: #{e.message}. The connection will be closed."
  end
  end

  ###
  def send_keep_alive_pings(connection)
  loop do
- ping_message = {
- type: 'ping',
- connection_id: @connection_id,
- connection_state: !connection.closed? ? 'connected' : 'disconnected',
- connection_class: connection.class.name
- }.to_json
- connection.write(ping_message)
- connection.flush
- Rails.logger.debug "[LlamaBot] Sent keep-alive ping: #{ping_message}"
+ # Stop the loop gracefully if the connection has already been closed.
+ break if connection.closed?
+
+ begin
+ ping_message = {
+ type: 'ping',
+ connection_id: @connection_id,
+ connection_state: !connection.closed? ? 'connected' : 'disconnected',
+ connection_class: connection.class.name
+ }.to_json
+ connection.write(ping_message)
+ connection.flush
+ Rails.logger.debug "[LlamaBot] Sent keep-alive ping: #{ping_message}"
+ rescue IOError, Errno::ECONNRESET => e
+ Rails.logger.warn "❌ [LlamaBot] Could not send ping, connection likely closed: #{e.message}"
+ # Break the loop to allow the task to terminate gracefully.
+ break
+ end
+
  Async::Task.current.sleep(30)
  end
- rescue => e
- Rails.logger.error "[LlamaBot] Error in keep-alive ping: #{e.message} | Connection type: #{connection.class.name}"
  end

  # Send messages from the user to the LlamaBot Backend Socket
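
The listener and keep-alive tasks now treat IOError and Errno::ECONNRESET as a normal end of the connection rather than a crash. A standalone sketch of the same guarded-ping pattern, with a duck-typed connection object standing in for the gem's websocket connection:

require "json"

# Sketch only: `connection` is assumed to respond to closed?, write, and flush.
def ping_until_closed(connection, interval: 30)
  loop do
    break if connection.closed? # stop quietly once the peer is gone

    begin
      connection.write({ type: "ping" }.to_json)
      connection.flush
    rescue IOError, Errno::ECONNRESET => e
      warn "ping failed, connection likely closed: #{e.message}"
      break # let the surrounding task finish instead of raising
    end

    sleep interval
  end
end
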
@@ -100,7 +100,7 @@ module LlamaBotRails
  # 1. Instantiate the builder
  builder = state_builder_class.new(
  params: params,
- context: { api_token: @api_token }
+ context: { api_token: @api_token }.with_indifferent_access
  )

  # 2. Construct the LangGraph-ready state
@@ -586,7 +586,7 @@
  let currentThreadId = null;
  let isSidebarCollapsed = false;
  let streamingTimeout = null;
- const STREAMING_TIMEOUT_MS = 30000; // 30 seconds timeout
+ const STREAMING_TIMEOUT_MS = 3000000; // 3000 seconds timeout

  // Initialize the app
  document.addEventListener('DOMContentLoaded', function() {
@@ -968,6 +968,7 @@
  }

  } catch (parseError) {
+ addMessage(`Error: ${parseError} - Data: ${jsonData}`, 'error');
  console.error('Error parsing SSE data:', parseError, 'Data:', jsonData);
  }
  }
@@ -659,7 +659,10 @@ This deprecated and will be removed over time.
  },
  received(data) {
  const parsedData = JSON.parse(data).message;
+ console.log("LLM Response:", parsedData);
  switch (parsedData.type) {
+ case "AIMessageChunk":
+ addMessage(parsedData.content, parsedData.type, parsedData.base_message);
  case "ai":
  addMessage(parsedData.content, parsedData.type, parsedData.base_message);
  break;
@@ -966,6 +969,11 @@ This deprecated and will be removed over time.
  const messageDiv = document.createElement('div');
  messageDiv.className = `message ${sender}-message`;

+ if (sender == "AIMessageChunk"){
+ console.log("AIMessageChunk" + base_message);
+ messageDiv.innerHTML += text;
+ }
+
  // Parse markdown for bot messages using Snarkdown, keep plain text for user messages
  if (sender === 'ai') { //Arghh. We're having issues with difference in formats between when we're streaming from updates mode, and when pulling state from checkpoint.
  if (text == ''){ //this is most likely a tool call.
@@ -1151,7 +1159,8 @@ This deprecated and will be removed over time.

  }
  else {
- messageDiv.innerHTML = snarkdown(text);
+ messageDiv.innerHTML = text;
+ // messageDiv.innerHTML = snarkdown(text);
  }
  } else if (sender === 'tool') { //tool messages are not parsed as markdown
  if (base_message.name == 'run_rails_console_command') {
@@ -11,9 +11,9 @@ module <%= app_name %>

  def build
  {
- message: @params[:message], # Rails param from JS/chat UI. This is the user's message to the agent.
- thread_id: @context[:thread_id], # This is the thread id for the agent. It is used to track the conversation history.
- api_token: @context[:api_token], # This is an authenticated API token for the agent, so that it can authenticate with us. (It may need access to resources on our Rails app, such as the Rails Console.)
+ message: @params["message"], # Rails param from JS/chat UI. This is the user's message to the agent.
+ thread_id: @params["thread_id"], # This is the thread id for the agent. It is used to track the conversation history.
+ api_token: @context["api_token"], # This is an authenticated API token for the agent, so that it can authenticate with us. (It may need access to resources on our Rails app, such as the Rails Console.)
  agent_prompt: LlamaBotRails.agent_prompt_text, # System prompt instructions for the agent. Can be customized in app/llama_bot/prompts/agent_prompt.txt
  agent_name: "llamabot" # This routes to the appropriate LangGraph agent as defined in LlamaBot/langgraph.json, and enables us to access different agents on our LlamaBot server.
  }
@@ -5,7 +5,10 @@ module LlamaBotRails

  included do
  # Add before_action filter to automatically check agent authentication for LlamaBot requests
- before_action :check_agent_authentication, if: :should_check_agent_auth?
+
+ if self < ActionController::Base
+ before_action :check_agent_authentication, if: :should_check_agent_auth?
+ end

  # ------------------------------------------------------------------
  # 1) For every Devise scope, alias authenticate_<scope>! so it now
  # 1) For every Devise scope, alias authenticate_<scope>! so it now
@@ -0,0 +1,149 @@
1
+ # lib/llama_bot_rails/agent_auth.rb
2
+ module LlamaBotRails
3
+ module AgentAuth
4
+ extend ActiveSupport::Concern
5
+ AUTH_SCHEME = "LlamaBot"
6
+
7
+ included do
8
+ # ------------------------------------------------------------------
9
+ # Use the right callback macro for the including class:
10
+ # • Controllers → before_action (old behaviour)
11
+ # • ActiveJob → before_perform (uses same checker)
12
+ # • Anything
13
+ # else → do nothing
14
+ # ------------------------------------------------------------------
15
+ if respond_to?(:before_action)
16
+ before_action :check_agent_authentication, if: :should_check_agent_auth?
17
+ elsif respond_to?(:before_perform)
18
+ before_perform :check_agent_authentication
19
+ end
20
+
21
+ # ------------------------------------------------------------------
22
+ # 1) For every Devise scope, alias authenticate_<scope>! so it now
23
+ # accepts *either* a logged-in browser session OR a valid agent
24
+ # token. Existing before/skip filters keep working.
25
+ # ------------------------------------------------------------------
26
+ if defined?(Devise)
27
+ Devise.mappings.keys.each do |scope|
28
+ scope_filter = :"authenticate_#{scope}!"
29
+
30
+ alias_method scope_filter, :authenticate_user_or_agent! \
31
+ if method_defined?(scope_filter)
32
+
33
+ define_method(scope_filter) do |*args|
34
+ Rails.logger.warn(
35
+ "#{scope_filter} is now handled by LlamaBotRails::AgentAuth "\
36
+ "and will be removed in a future version. "\
37
+ "Use authenticate_user_or_agent! instead."
38
+ )
39
+ authenticate_user_or_agent!(*args)
40
+ end
41
+ end
42
+ end
43
+
44
+ # ------------------------------------------------------------------
45
+ # 2) If Devise isn’t loaded at all, fall back to one alias so apps
46
+ # that had authenticate_user! manually defined don’t break.
47
+ # ------------------------------------------------------------------
48
+ unless defined?(Devise)
49
+ original_authenticate_user =
50
+ instance_method(:authenticate_user!) if method_defined?(:authenticate_user!)
51
+
52
+ define_method(:authenticate_user!) do |*args|
53
+ authenticate_user_or_agent!(*args)
54
+ end
55
+ end
56
+ end
57
+
58
+ # --------------------------------------------------------------------
59
+ # Public helper: true if the request carries a *valid* agent token
60
+ # --------------------------------------------------------------------
61
+ def should_check_agent_auth?
62
+ # Skip if a Devise user is already signed in
63
+ return false if devise_user_signed_in?
64
+ llama_bot_request?
65
+ end
66
+
67
+ def llama_bot_request?
68
+ return false unless respond_to?(:request) && request&.headers
69
+ scheme, token = request.headers["Authorization"]&.split(" ", 2)
70
+ Rails.logger.debug("[LlamaBot] auth header = #{scheme.inspect} #{token&.slice(0,8)}…")
71
+ return false unless scheme == AUTH_SCHEME && token.present?
72
+
73
+ Rails.application.message_verifier(:llamabot_ws).verify(token)
74
+ true
75
+ rescue ActiveSupport::MessageVerifier::InvalidSignature
76
+ false
77
+ end
78
+
79
+ private
80
+
81
+ # --------------------------------------------------------------------
82
+ # Automatic check for LlamaBot requests
83
+ # --------------------------------------------------------------------
84
+ def check_agent_authentication
85
+ # Jobs don’t have a request object, so skip token logic there
86
+ return if is_a?(ActiveJob::Base)
87
+
88
+ has_permitted_actions = self.class.respond_to?(:llama_bot_permitted_actions)
89
+ return unless has_permitted_actions
90
+
91
+ is_llama_request = llama_bot_request?
92
+ action_is_whitelisted = self.class.llama_bot_permitted_actions.include?(action_name)
93
+
94
+ if is_llama_request
95
+ unless action_is_whitelisted
96
+ Rails.logger.warn("[LlamaBot] Action '#{action_name}' isn't white-listed for LlamaBot.")
97
+ render json: { error: "Action '#{action_name}' isn't white-listed for LlamaBot." },
98
+ status: :forbidden
99
+ end
100
+ elsif action_is_whitelisted
101
+ Rails.logger.warn("[LlamaBot] Action '#{action_name}' requires LlamaBot authentication.")
102
+ render json: { error: "Action '#{action_name}' requires LlamaBot authentication" },
103
+ status: :forbidden
104
+ end
105
+ end
106
+
107
+ # --------------------------------------------------------------------
108
+ # Unified guard — browser OR agent
109
+ # --------------------------------------------------------------------
110
+ def devise_user_signed_in?
111
+ return false unless defined?(Devise)
112
+ return false unless respond_to?(:request) && request&.env
113
+ request.env["warden"]&.authenticated?
114
+ end
115
+
116
+ def authenticate_user_or_agent!(*)
117
+ return if devise_user_signed_in? # any logged-in Devise scope
118
+
119
+ if llama_bot_request?
120
+ scheme, token = request.headers["Authorization"]&.split(" ", 2)
121
+ data = Rails.application.message_verifier(:llamabot_ws).verify(token)
122
+
123
+ allowed = self.class.respond_to?(:llama_bot_permitted_actions) &&
124
+ self.class.llama_bot_permitted_actions.include?(action_name)
125
+
126
+ if allowed
127
+ user_object = LlamaBotRails.user_resolver.call(data[:user_id])
128
+ unless LlamaBotRails.sign_in_method.call(request.env, user_object)
129
+ head :unauthorized
130
+ end
131
+ return # ✅ token + allow-listed action
132
+ else
133
+ Rails.logger.warn("[LlamaBot] Action '#{action_name}' isn't white-listed for LlamaBot.")
134
+ render json: { error: "Action '#{action_name}' isn't white-listed for LlamaBot." },
135
+ status: :forbidden
136
+ return false
137
+ end
138
+ end
139
+
140
+ # Fall back to Devise or plain 401
141
+ if defined?(Devise) && respond_to?(:request) && request&.env
142
+ request.env["warden"].authenticate!
143
+ else
144
+ head :unauthorized
145
+ end
146
+ end
147
+ end
148
+ end
149
+
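
The concern accepts an Authorization header of the form "LlamaBot <token>", where the token was generated by Rails' message verifier for the :llamabot_ws purpose and carries the user id that user_resolver expects. A hedged sketch of calling an allow-listed endpoint this way (the route, payload, and user id are placeholders, not part of the gem):

require "net/http"
require "json"
require "uri"

# In the Rails app (e.g. rails console): mint a token the concern can verify.
token = Rails.application.message_verifier(:llamabot_ws).generate(user_id: 1)

uri = URI("http://localhost:3000/pages/1") # placeholder allow-listed route
request = Net::HTTP::Patch.new(uri)
request["Authorization"] = "LlamaBot #{token}" # AUTH_SCHEME plus the signed token
request["Content-Type"]  = "application/json"
request.body = { page: { content: "Hello from the agent" } }.to_json

response = Net::HTTP.start(uri.host, uri.port) { |http| http.request(request) }
puts response.code
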
@@ -6,17 +6,17 @@ module LlamaBotRails
  @context = context
  end

-
  # Warning: Types must match exactly or you'll get Pydantic errors. It's brittle - If these don't match exactly what's in nodes.py LangGraph state pydantic types, (For example, having a null value/None type when it should be a string) it will the agent..
  # So if it doesn't map state types properly from the frontend, it will break. (must be exactly what's defined here).
  # There won't be an exception thrown -- instead, you'll get an pydantic error message showing up in the BaseMessage content field. (In my case, it was a broken ToolMessage, but serializes from the inherited BaseMessage)
- def build
+ def build
  {
- message: @params[:message], # Rails param from JS/chat UI. This is the user's message to the agent.
- thread_id: @params[:thread_id], # This is the thread id for the agent. It is used to track the conversation history.
- api_token: @context[:api_token], # This is an authenticated API token for the agent, so that it can authenticate with us. (It may need access to resources on our Rails app, such as the Rails Console.)
+ message: @params["message"], # Rails param from JS/chat UI. This is the user's message to the agent.
+ thread_id: @params["thread_id"], # This is the thread id for the agent. It is used to track the conversation history.
+ api_token: @context["api_token"], # This is an authenticated API token for the agent, so that it can authenticate with us. (It may need access to resources on our Rails app, such as the Rails Console.)
  agent_prompt: LlamaBotRails.agent_prompt_text, # System prompt instructions for the agent. Can be customized in app/llama_bot/prompts/agent_prompt.txt
- agent_name: "llamabot" #This routes to the appropriate LangGraph agent as defined in LlamaBot/langgraph.json, and enables us to access different agents on our LlamaBot server.
+ agent_name: "llamabot", #This routes to the appropriate LangGraph agent as defined in LlamaBot/langgraph.json, and enables us to access different agents on our LlamaBot server.
+ available_routes: @context[:available_routes] # This is an array of routes that the agent can access. It is used to track the conversation history.
  }
  end
  end
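
Because Rails delivers params with string keys, the builder now indexes @params with strings; @context works with either key type thanks to the indifferent-access wrapping shown earlier. A quick sketch of driving the builder directly inside a Rails app that has the gem installed (values are placeholders):

builder = LlamaBotRails::AgentStateBuilder.new(
  params:  { "message" => "Hello there", "thread_id" => "thread-123" }, # string keys, as Rails sends them
  context: { api_token: "example-token" }.with_indifferent_access
)

state = builder.build
state[:message]   # => "Hello there"
state[:api_token] # => "example-token"
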
@@ -30,6 +30,9 @@ module LlamaBotRails
  http = Net::HTTP.new(uri.host, uri.port)

  request = Net::HTTP::Post.new(uri)
+
+ http.use_ssl = (uri.scheme == "https")
+
  request['Content-Type'] = 'application/json'
  request.body = agent_params.to_json
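
Net::HTTP ignores the URI scheme unless use_ssl is set, so without this line an https:// backend URL would be requested over plain TCP and the TLS endpoint would refuse it. A standalone sketch of the pattern with a placeholder endpoint:

require "net/http"
require "json"
require "uri"

uri  = URI("https://llamabot-backend.example.com/llamabot/agent/message") # placeholder URL
http = Net::HTTP.new(uri.host, uri.port)
http.use_ssl = (uri.scheme == "https") # required, or the TLS endpoint is hit over plain HTTP

request = Net::HTTP::Post.new(uri)
request["Content-Type"] = "application/json"
request.body = { message: "ping" }.to_json

response = http.request(request)
puts response.code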
 
@@ -0,0 +1,117 @@
+ module LlamaBotRails
+ module RouteHelper
+ # Extracts the description from YARD comments
+ def self.extract_yard_description(comment_text)
+ comment_text.lines.map { |l| l.sub(/^# ?/, '') }
+ .take_while { |l| !l.strip.start_with?('@') }
+ .join(' ').strip
+ end
+
+ # Extracts a specific YARD tag from comments
+ def self.extract_yard_tag(comment_text, tag)
+ if match = comment_text.match(/@#{tag} (.+)/)
+ match[1].strip
+ end
+ end
+
+ # Main method: returns XML string of formatted routes for allowed_routes
+ def self.formatted_routes_xml(allowed_routes)
+ xml_routes = ""
+ allowed_routes.each do |route_str|
+ controller, action = route_str.split('#')
+ matching_routes = Rails.application.routes.routes.select do |r|
+ r.defaults[:controller] == controller && r.defaults[:action] == action
+ end
+
+ matching_routes.each do |r|
+ verb = r.verb.to_s.gsub(/[$^]/, '') # Handles both Regexp and String
+ path = r.path.spec.to_s
+ path_params = path.scan(/:\w+/).map { |p| p[1..-1] } # e.g. ["id"]
+
+ # Extract controller class and strong parameters
+ controller_class = "#{controller.camelize}Controller".safe_constantize
+ strong_params = []
+ yard_metadata = {}
+
+ if controller_class
+ # Extract YARD documentation for the action
+ begin
+ method_obj = controller_class.instance_method(action.to_sym)
+ source_location = method_obj.source_location
+ if source_location
+ file_path, line_number = source_location
+ file_lines = File.readlines(file_path)
+ # Look for YARD comments above the method
+ comment_lines = []
+ current_line = line_number - 2 # Start above the method definition
+ while current_line >= 0 && file_lines[current_line].strip.start_with?('#')
+ comment_lines.unshift(file_lines[current_line].strip)
+ current_line -= 1
+ end
+ # Parse YARD tags
+ comment_text = comment_lines.join("\n")
+ yard_metadata[:description] = extract_yard_description(comment_text)
+ yard_metadata[:tool_description] = extract_yard_tag(comment_text, 'tool_description')
+ yard_metadata[:example] = extract_yard_tag(comment_text, 'example')
+ yard_metadata[:params] = extract_yard_tag(comment_text, 'params')
+ end
+ rescue => e
+ # Silently continue if YARD parsing fails
+ end
+ # Look for the strong parameter method (e.g., page_params, user_params, etc.)
+ param_method = "#{controller.singularize}_params"
+ if controller_class.private_method_defined?(param_method.to_sym)
+ source_location = controller_class.instance_method(param_method.to_sym).source_location
+ if source_location
+ file_path, line_number = source_location
+ file_lines = File.readlines(file_path)
+ method_lines = []
+ current_line = line_number - 1
+ while current_line < file_lines.length
+ line = file_lines[current_line].strip
+ method_lines << line
+ break if line.include?('end') && !line.include?('permit')
+ current_line += 1
+ end
+ method_source = method_lines.join(' ')
+ if match = method_source.match(/\.permit\((.*?)\)/)
+ permit_content = match[1]
+ strong_params = permit_content.scan(/:(\w+)/).flatten
+ end
+ end
+ end
+ # Also check for any additional params the action might accept
+ additional_params = []
+ case action
+ when 'update', 'create'
+ if controller == 'pages' && action == 'update'
+ additional_params << 'message'
+ end
+ end
+ all_params = (path_params + strong_params + additional_params).uniq
+ else
+ all_params = path_params
+ end
+
+ xml = <<~XML
+ <route>
+ <name>#{route_str}</name>
+ <verb>#{verb}</verb>
+ <path>#{path}</path>
+ <path_params>#{path_params.join(', ')}</path_params>
+ <accepted_params>#{all_params.join(', ')}</accepted_params>
+ <strong_params>#{strong_params.join(', ')}</strong_params>
+ <description>#{yard_metadata[:description]}</description>
+ <tool_description>#{yard_metadata[:tool_description]}</tool_description>
+ <example>#{yard_metadata[:example]}</example>
+ <params>#{yard_metadata[:params]}</params>
+ </route>
+ XML
+
+ xml_routes += xml
+ end
+ end
+ xml_routes
+ end
+ end
+ end
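
RouteHelper.formatted_routes_xml takes "controller#action" strings, finds the matching routes, scrapes strong parameters and YARD tags out of the controller source, and returns one <route> element per match. A hedged usage sketch (the controller/action names are placeholders from a host app):

# e.g. in a Rails console or while assembling agent state:
allowed = ["pages#update", "pages#show"] # placeholder controller#action pairs

xml = LlamaBotRails::RouteHelper.formatted_routes_xml(allowed)
puts xml # one <route> block per matching route, shaped by the heredoc above
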
@@ -1,3 +1,3 @@
  module LlamaBotRails
- VERSION = "0.1.13"
+ VERSION = "0.1.14"
  end
@@ -5,6 +5,7 @@ require "llama_bot_rails/llama_bot"
  require "llama_bot_rails/agent_state_builder"
  require "llama_bot_rails/controller_extensions"
  require "llama_bot_rails/agent_auth"
+ require "llama_bot_rails/route_helper"

  module LlamaBotRails
  # ------------------------------------------------------------------
@@ -74,4 +75,4 @@ module LlamaBotRails
  # Bridge to backend service
  # ------------------------------------------------------------------
  def self.send_agent_message(params) = LlamaBot.send_agent_message(params)
- end
+ end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: llama_bot_rails
  version: !ruby/object:Gem::Version
- version: 0.1.13
+ version: 0.1.14
  platform: ruby
  authors:
  - Kody Kendall
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2025-07-16 00:00:00.000000000 Z
+ date: 2025-07-31 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: rails
@@ -151,11 +151,13 @@ files:
  - lib/generators/llama_bot_rails/install/templates/agent_state_builder.rb.erb
  - lib/llama_bot_rails.rb
  - lib/llama_bot_rails/agent_auth.rb
+ - lib/llama_bot_rails/agent_auth_2.rb
  - lib/llama_bot_rails/agent_state_builder.rb
  - lib/llama_bot_rails/controller_extensions.rb
  - lib/llama_bot_rails/engine.rb
  - lib/llama_bot_rails/llama_bot.rb
  - lib/llama_bot_rails/railtie.rb
+ - lib/llama_bot_rails/route_helper.rb
  - lib/llama_bot_rails/tools/rails_console_tool.rb
  - lib/llama_bot_rails/version.rb
  - lib/tasks/llama_bot_rails_tasks.rake